Compare commits

...

5 Commits

32 changed files with 5754 additions and 620 deletions

2
.gitignore vendored
View File

@@ -14,7 +14,7 @@ controllerBuilds
lstV2/frontend/.tanstack lstV2/frontend/.tanstack
mobileLst mobileLst
keys
# Logs # Logs
logs logs
*.log *.log

View File

@@ -1,5 +1,6 @@
dist dist
frontend/dist frontend/dist
mobileLst/dist
lstDocs/build lstDocs/build
migrations migrations
Dockerfile Dockerfile

14
.vscode/launch.json vendored
View File

@@ -1,14 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Attach to packager",
"cwd": "${workspaceFolder}",
"type": "reactnative",
"request": "attach"
}
]
}

View File

@@ -1,48 +0,0 @@
{
"editor.defaultFormatter": "esbenp.prettier-vscode",
"workbench.colorTheme": "Default Dark+",
"prettier.tabWidth": 4,
"terminal.integrated.env.windows": {},
"editor.formatOnSave": true,
"[javascript]": {
"editor.formatOnSave": true
},
"[javascriptreact]": {
"editor.formatOnSave": true
},
"[typescript]": {
"editor.formatOnSave": true
},
"[typescriptreact]": {
"editor.formatOnSave": true
},
"[json]": {
"editor.formatOnSave": true
},
"[graphql]": {
"editor.formatOnSave": true
},
"[handlebars]": {
"editor.formatOnSave": true
},
"[go]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "golang.go"
},
"[powershell]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "ms-vscode.powershell" // requires PowerShell extension
},
"[bat]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format" // supports .sh, .bat, .cmd
},
"[cmd]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format"
},
// Optional: Configure goimports instead of gofmt
"go.formatTool": "goimports",
"cSpell.words": ["alpla", "alplamart", "alplaprod", "ppoo"]
}

68
.vscode/settings.json vendored
View File

@@ -1,19 +1,59 @@
{ {
"editor.defaultFormatter": "biomejs.biome", "editor.defaultFormatter": "biomejs.biome",
"workbench.colorTheme": "Default Dark+", "workbench.colorTheme": "Default Dark+",
"terminal.integrated.env.windows": {}, "terminal.integrated.env.windows": {},
"editor.formatOnSave": true, "editor.formatOnSave": true,
"editor.codeActionsOnSave": { "editor.codeActionsOnSave": {
"source.fixAll.biome": "explicit", "source.fixAll.biome": "explicit",
"source.organizeImports.biome": "explicit" "source.organizeImports.biome": "explicit"
}, },
"cSpell.words": [ "[javascript]": {
"acitve", "editor.formatOnSave": true
"alpla", },
"alplamart", "[javascriptreact]": {
"alplaprod", "editor.formatOnSave": true
"intiallally", },
"ppoo", "[typescript]": {
"prodlabels" "editor.formatOnSave": true
] },
"[typescriptreact]": {
"editor.formatOnSave": true
},
"[json]": {
"editor.formatOnSave": true
},
"[graphql]": {
"editor.formatOnSave": true
},
"[handlebars]": {
"editor.formatOnSave": true
},
"[go]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "golang.go"
},
"[powershell]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "ms-vscode.powershell" // requires PowerShell extension
},
"[bat]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format" // supports .sh, .bat, .cmd
},
"[cmd]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format"
},
// Optional: Configure goimports instead of gofmt
"go.formatTool": "goimports",
"cSpell.words": [
"acitve",
"alpla",
"alplamart",
"alplaprod",
"intiallally",
"ppoo",
"prodlabels"
]
} }

View File

@@ -5,13 +5,13 @@ meta {
} }
get { get {
url: {{url}}/lst/old/api/eom/histinv?month=2025-11-01 url: {{url}}/lst/old/api/eom/histinv?month=2025/11/1
body: none body: none
auth: inherit auth: inherit
} }
params:query { params:query {
month: 2025-11-01 month: 2025/11/1
} }
settings { settings {

View File

@@ -0,0 +1,16 @@
meta {
name: materialPerDay
type: http
seq: 2
}
get {
url: {{urlv2}}/api/notify/materialperday
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,25 @@
meta {
name: Add pallet
type: http
seq: 2
}
post {
url: {{url}}/lst/old/api/quality/newrequest
body: json
auth: inherit
}
body:json {
{
"username": "matthes01",
"runningNr": 618302,
"palletStatusText":"return" // returned will be the only allowed key
//"moveTo": "hold area" //hold area, rework, inspection
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,16 @@
meta {
name: Get Pallets
type: http
seq: 1
}
get {
url: {{url}}/lst/old/api/quality/getrequest
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: Quality
seq: 7
}
auth {
mode: inherit
}

View File

@@ -5,11 +5,16 @@ meta {
} }
get { get {
url: url: {{url}}/lst/api/user/me
body: none body: none
auth: inherit auth: bearer
}
auth:bearer {
token: jpHHbLNGJRpUMvfrVOYmhbJL2Ux0arse
} }
settings { settings {
encodeUrl: true encodeUrl: true
timeout: 0
} }

View File

@@ -0,0 +1,20 @@
meta {
name: otacheck
type: http
seq: 3
}
get {
url: http://10.193.0.56:4000/api/mobile/updates
body: none
auth: inherit
}
headers {
expo-runtime-version: 1.0.0
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -3,6 +3,7 @@ process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
import { toNodeHandler } from "better-auth/node"; import { toNodeHandler } from "better-auth/node";
import cors from "cors"; import cors from "cors";
import express from "express"; import express from "express";
import fs from "fs";
import { createServer } from "http"; import { createServer } from "http";
import { createProxyMiddleware, fixRequestBody } from "http-proxy-middleware"; import { createProxyMiddleware, fixRequestBody } from "http-proxy-middleware";
import morgan from "morgan"; import morgan from "morgan";
@@ -13,6 +14,7 @@ import swaggerUi from "swagger-ui-express";
import { fileURLToPath } from "url"; import { fileURLToPath } from "url";
import { userMigrate } from "./src/internal/auth/controller/userMigrate.js"; import { userMigrate } from "./src/internal/auth/controller/userMigrate.js";
import { schedulerManager } from "./src/internal/logistics/controller/schedulerManager.js"; import { schedulerManager } from "./src/internal/logistics/controller/schedulerManager.js";
import { setupMobileRoutes } from "./src/internal/mobile/route.js";
import { printers } from "./src/internal/ocp/printers/printers.js"; import { printers } from "./src/internal/ocp/printers/printers.js";
import { setupRoutes } from "./src/internal/routerHandler/routeHandler.js"; import { setupRoutes } from "./src/internal/routerHandler/routeHandler.js";
import { baseModules } from "./src/internal/system/controller/modules/baseModules.js"; import { baseModules } from "./src/internal/system/controller/modules/baseModules.js";
@@ -156,12 +158,20 @@ const main = async () => {
}, },
methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"], methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
credentials: true, credentials: true,
exposedHeaders: ["set-cookie"], exposedHeaders: [
"set-cookie",
"expo-protocol-version",
"expo-sfv-version",
],
allowedHeaders: [ allowedHeaders: [
"Content-Type", "Content-Type",
"Authorization", "Authorization",
"X-Requested-With", "X-Requested-With",
"XMLHttpRequest", "XMLHttpRequest",
"expo-runtime-version",
"expo-platform",
"expo-channel-name",
"*",
], ],
}), }),
); );
@@ -188,22 +198,6 @@ const main = async () => {
res.sendFile(join(__dirname, "../lstDocs/build/index.html")); res.sendFile(join(__dirname, "../lstDocs/build/index.html"));
}); });
// app ota updates
app.use(
basePath + "/api/mobile/updates",
express.static(join(__dirname, "../mobileLst/dist"), {
setHeaders(res) {
// OTA runtime needs to fetch these from the device
console.log("OTA check called");
res.setHeader("Access-Control-Allow-Origin", "*");
},
}),
);
app.get(basePath + "/api/mobile", (_, res) =>
res.status(200).json({ message: "LST OTA server is up." }),
);
// server setup // server setup
const server = createServer(app); const server = createServer(app);
@@ -223,7 +217,7 @@ const main = async () => {
// start up the v1listener // start up the v1listener
v1Listener(); v1Listener();
addListeners(); addListeners();
userMigrate(); //userMigrate();
// some temp fixes // some temp fixes
manualFixes(); manualFixes();

View File

@@ -0,0 +1,211 @@
import type { Express, Request, Response } from "express";
import express, { Router } from "express";
import { readdirSync, readFileSync, statSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import crypto from "crypto";
import fs from "fs";
/**
 * Mounts the expo-updates OTA endpoints:
 *  - GET  <basePath>/api/mobile/updates  -> update manifest (Expo SDK 50+ format)
 *  - GET  <basePath>/api/mobile/updates/* -> static bundle/asset files
 *  - GET  <basePath>/api/mobile          -> liveness probe
 */
export const setupMobileRoutes = (app: Express, basePath: string) => {
    const __filename = fileURLToPath(import.meta.url);
    const __dirname = dirname(__filename);
    // Root of the exported Expo bundle (output of `expo export`).
    const distPath = join(__dirname, "../../../../mobileLst/dist");

    /**
     * Build the `assets` section of the update manifest by sha256-hashing
     * every regular file under `<dist>/assets`. Returns [] when the directory
     * is missing or unreadable, so an update can still be served with the
     * launch asset alone.
     */
    function generateAssetManifest(baseUrl: string) {
        const assets: any[] = [];
        const assetsDir = join(distPath, "assets");
        try {
            if (!fs.existsSync(assetsDir)) {
                return assets;
            }
            const files = readdirSync(assetsDir);
            files.forEach((file) => {
                const filePath = join(assetsDir, file);
                const stats = statSync(filePath);
                if (stats.isFile()) {
                    const content = readFileSync(filePath);
                    const hash = crypto
                        .createHash("sha256")
                        .update(content)
                        .digest("hex");
                    assets.push({
                        hash: hash,
                        key: file,
                        fileExtension: `.${file.split(".").pop()}`,
                        contentType: getContentType(file),
                        url: `${baseUrl}/assets/${file}`,
                    });
                }
            });
        } catch (err) {
            // Best-effort: a broken asset dir must not take the endpoint down.
            console.log("Error reading assets:", err);
        }
        return assets;
    }

    /** Map a file name to a MIME type; unknown extensions fall back to octet-stream. */
    function getContentType(filename: string): string {
        const ext = filename.split(".").pop()?.toLowerCase();
        const contentTypes: { [key: string]: string } = {
            hbc: "application/javascript", // Hermes bytecode bundle
            bundle: "application/javascript",
            js: "application/javascript",
            json: "application/json",
            png: "image/png",
            jpg: "image/jpeg",
            jpeg: "image/jpeg",
            gif: "image/gif",
            ttf: "font/ttf",
            otf: "font/otf",
            woff: "font/woff",
            woff2: "font/woff2",
        };
        return contentTypes[ext || ""] || "application/octet-stream";
    }

    // Manifest endpoint polled by expo-updates on the device.
    app.get(basePath + "/api/mobile/updates", (req, res) => {
        console.log("=== OTA Update Request ===");
        console.log("Headers:", JSON.stringify(req.headers, null, 2));
        const runtimeVersion = req.headers["expo-runtime-version"];
        // NOTE(review): platform is read but the bundle dir below is hard-coded
        // to android, so iOS clients would receive the android bundle — confirm.
        const platform = req.headers["expo-platform"] || "android";
        const expectedRuntimeVersion = "1.0.0";
        if (runtimeVersion !== expectedRuntimeVersion) {
            console.log(
                `Runtime mismatch: got ${runtimeVersion}, expected ${expectedRuntimeVersion}`
            );
            return res.status(404).json({
                error: "No update available for this runtime version",
                requestedVersion: runtimeVersion,
                availableVersion: expectedRuntimeVersion,
            });
        }
        try {
            // NOTE(review): protocol is pinned to http; behind an https reverse
            // proxy the generated asset URLs become mixed content. Consider
            // deriving it from X-Forwarded-Proto — confirm deployment topology.
            const host = req.get("host");
            const protocol = "http";
            const baseUrl = `${protocol}://${host}/api/mobile/updates`;
            // Locate the Hermes bytecode bundle produced by the export.
            const bundleDir = join(distPath, "_expo/static/js/android");
            if (!fs.existsSync(bundleDir)) {
                console.error("Bundle directory does not exist:", bundleDir);
                return res
                    .status(500)
                    .json({ error: "Bundle directory not found" });
            }
            const bundleFiles = readdirSync(bundleDir);
            console.log("Available bundle files:", bundleFiles);
            const bundleFile = bundleFiles.find((f) => f.endsWith(".hbc"));
            if (!bundleFile) {
                console.error("No .hbc file found in:", bundleDir);
                return res
                    .status(500)
                    .json({ error: "Hermes bundle (.hbc) not found" });
            }
            console.log("Using bundle file:", bundleFile);
            const bundlePath = join(bundleDir, bundleFile);
            const bundleContent = readFileSync(bundlePath);
            const bundleHash = crypto
                .createHash("sha256")
                .update(bundleContent)
                .digest("hex");
            const updateId = crypto.randomUUID();
            const createdAt = new Date().toISOString();
            // expo-updates "modern" manifest format (Expo SDK 50+).
            const manifest = {
                id: updateId,
                createdAt: createdAt,
                runtimeVersion: expectedRuntimeVersion,
                launchAsset: {
                    hash: bundleHash,
                    key: bundleFile,
                    contentType: "application/javascript",
                    fileExtension: ".hbc",
                    url: `${baseUrl}/_expo/static/js/android/${bundleFile}`,
                },
                assets: generateAssetManifest(baseUrl),
                metadata: {},
                extra: {
                    expoClient: {
                        name: "LSTScanner",
                        slug: "lst-scanner-app",
                        version: "1.0.0",
                        runtimeVersion: expectedRuntimeVersion,
                    },
                },
            };
            console.log(
                "Returning manifest:",
                JSON.stringify(manifest, null, 2)
            );
            res.setHeader("Content-Type", "application/json");
            res.setHeader("Access-Control-Allow-Origin", "*");
            // Required by the expo-updates protocol handshake.
            res.setHeader("expo-protocol-version", "1");
            res.setHeader("expo-sfv-version", "0");
            res.json(manifest);
        } catch (error: any) {
            console.error("Error generating manifest:", error);
            // NOTE(review): returning error.stack to clients leaks internals —
            // consider logging it server-side only.
            res.status(500).json({
                error: "Failed to generate manifest",
                details: error.message,
                stack: error.stack,
            });
        }
    });

    // Static serving for the bundle and assets referenced by the manifest.
    app.use(
        basePath + "/api/mobile/updates",
        express.static(distPath, {
            setHeaders(res, path) {
                res.setHeader("Access-Control-Allow-Origin", "*");
                // Files are content-addressed by hash, so hard caching is safe.
                res.setHeader("Cache-Control", "public, max-age=31536000");
                if (path.endsWith(".hbc")) {
                    res.setHeader("Content-Type", "application/javascript");
                }
            },
        })
    );

    // Simple liveness probe for the OTA server.
    app.get(basePath + "/api/mobile", (_, res) =>
        res.status(200).json({ message: "LST OTA server is up." })
    );
};

View File

@@ -4,22 +4,24 @@ import { setupAuthRoutes } from "../auth/routes/routes.js";
import { setupForkliftRoutes } from "../forklifts/routes/routes.js"; import { setupForkliftRoutes } from "../forklifts/routes/routes.js";
import { setupLogisticsRoutes } from "../logistics/routes.js"; import { setupLogisticsRoutes } from "../logistics/routes.js";
import { setupSystemRoutes } from "../system/routes.js"; import { setupSystemRoutes } from "../system/routes.js";
import { setupMobileRoutes } from "../mobile/route.js";
export const setupRoutes = (app: Express, basePath: string) => { export const setupRoutes = (app: Express, basePath: string) => {
// all routes // all routes
setupAuthRoutes(app, basePath); setupAuthRoutes(app, basePath);
setupAdminRoutes(app, basePath); setupAdminRoutes(app, basePath);
setupSystemRoutes(app, basePath); setupSystemRoutes(app, basePath);
setupLogisticsRoutes(app, basePath); setupLogisticsRoutes(app, basePath);
setupForkliftRoutes(app, basePath); setupForkliftRoutes(app, basePath);
setupMobileRoutes(app, basePath);
// always try to go to the app weather we are in dev or in production. // always try to go to the app weather we are in dev or in production.
app.get(basePath + "/", (req: Request, res: Response) => { app.get(basePath + "/", (req: Request, res: Response) => {
res.redirect(basePath + "/app"); res.redirect(basePath + "/app");
}); });
// Fallback 404 handler // Fallback 404 handler
app.use((req: Request, res: Response) => { app.use((req: Request, res: Response) => {
res.status(404).json({ error: "Not Found" }); res.status(404).json({ error: "Not Found" });
}); });
}; };

View File

@@ -3,53 +3,72 @@ import ForecastImport from "./ForecastImport";
import OrderImport from "./OrderImport"; import OrderImport from "./OrderImport";
export default function DMButtons() { export default function DMButtons() {
const { settings } = useSettingStore(); const { settings } = useSettingStore();
const testServers = ["test1", "test2", "test3"]; const testServers = ["test1", "test2", "test3"];
const plantToken = settings.filter((n) => n.name === "plantToken"); const plantToken = settings.filter((n) => n.name === "plantToken");
//console.log(plantToken);
return ( //console.log(plantToken);
<div className="flex flex-row-reverse gap-1"> return (
<OrderImport fileType={"macro"} name={"Macro Import"} /> <div className="flex flex-row-reverse gap-1">
{/* dev and testserver sees all */} <OrderImport fileType={"macro"} name={"Macro Import"} />
{testServers.includes(plantToken[0]?.value) && ( {/* dev and testserver sees all */}
<div className="flex flex-row gap-2"> {testServers.includes(plantToken[0]?.value) && (
<OrderImport fileType={"abbott"} name={"Abbott truck list"} /> <div className="flex flex-row gap-2">
<OrderImport fileType={"energizer"} name={"Energizer Truck List"} /> <OrderImport
<ForecastImport fileType={"loreal"} name={"VMI Import"} /> fileType={"abbott"}
<ForecastImport fileType={"pg"} name={"P&G"} /> name={"Abbott truck list"}
<ForecastImport fileType={"energizer"} name={"Energizer Forecast"} /> />
</div> <OrderImport
)} fileType={"energizer"}
{plantToken[0]?.value === "usday1" && ( name={"Energizer Truck List"}
<div className="flex flex-row gap-2"> />
<OrderImport fileType={"abbott"} name={"Abbott truck list"} /> <ForecastImport fileType={"loreal"} name={"VMI Import"} />
<OrderImport fileType={"energizer"} name={"Energizer Truck List"} /> <ForecastImport fileType={"pg"} name={"P&G"} />
<ForecastImport fileType={"energizer"} name={"Energizer Forecast"} /> <ForecastImport
</div> fileType={"energizer"}
)} name={"Energizer Forecast"}
{plantToken[0]?.value === "usflo1" && ( />
<div className="flex flex-row gap-2"> </div>
<ForecastImport fileType={"loreal"} name={"VMI Import"} /> )}
</div> {plantToken[0]?.value === "usday1" && (
)} <div className="flex flex-row gap-2">
{plantToken[0]?.value === "usstp1" && ( <OrderImport
<div className="flex flex-row gap-2"></div> fileType={"abbott"}
)} name={"Abbott truck list"}
{plantToken[0]?.value === "usiow1" && ( />
<div className="flex flex-row gap-2"> <OrderImport
<ForecastImport fileType={"pg"} name={"P&G"} /> fileType={"energizer"}
</div> name={"Energizer Truck List"}
)} />
{plantToken[0]?.value === "usiow2" && ( <ForecastImport
<div className="flex flex-row gap-2"> fileType={"energizer"}
<ForecastImport fileType={"pg"} name={"P&G"} /> name={"Energizer Forecast"}
</div> />
)} </div>
{plantToken[0]?.value === "usksc1" && ( )}
<div className="flex flex-row gap-2"> {plantToken[0]?.value === "usflo1" && (
<ForecastImport fileType={"pg"} name={"P&G"} /> <div className="flex flex-row gap-2">
</div> <ForecastImport fileType={"loreal"} name={"VMI Import"} />
)} </div>
</div> )}
); {plantToken[0]?.value === "usstp1" && (
<div className="flex flex-row gap-2"></div>
)}
{plantToken[0]?.value === "usiow1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
{plantToken[0]?.value === "usiow2" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
{plantToken[0]?.value === "usksc1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
</div>
);
} }

View File

@@ -3,70 +3,80 @@ import { useRef, useState } from "react";
import { toast } from "sonner"; import { toast } from "sonner";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { useAuth } from "@/lib/authClient"; import { useAuth } from "@/lib/authClient";
import { useNavigate, useRouterState } from "@tanstack/react-router";
export default function ForecastImport(props: any) { export default function ForecastImport(props: any) {
const fileInputRef: any = useRef(null); const fileInputRef: any = useRef(null);
const [posting, setPosting] = useState(false); const [posting, setPosting] = useState(false);
//const token = localStorage.getItem("auth_token"); //const token = localStorage.getItem("auth_token");
const { session } = useAuth(); const { session } = useAuth();
//const [fileType, setFileType] = useState(""); //const [fileType, setFileType] = useState("");
const importOrders = async (e: any) => { const navigate = useNavigate();
const file = e.target.files[0]; const router = useRouterState();
if (!file) { const currentPath = router.location.href;
toast.error("Missing file please try again");
setPosting(false);
return;
}
// create the form data with the correct fileType const importOrders = async (e: any) => {
const formData = new FormData(); if (!session || !session.user) {
formData.append("postForecast", e.target.files[0]); toast.error("You are allowed to do this unless you are logged in");
formData.append("fileType", props.fileType); // extra field navigate({ to: "/login", search: { redirect: currentPath } });
formData.append("username", `${session?.user.username}`); return;
}
const file = e.target.files[0];
if (!file) {
toast.error("Missing file please try again");
setPosting(false);
return;
}
// console.log(formData); // create the form data with the correct fileType
toast.success("Import started."); const formData = new FormData();
try { formData.append("postForecast", e.target.files[0]);
const response = await axios.post( formData.append("fileType", props.fileType); // extra field
"/lst/old/api/logistics/postforecastin", formData.append("username", `${session?.user.username}`);
formData,
{
headers: {
"Content-Type": "multipart/form-data",
},
},
);
//console.log("Upload successful:", response.data);
toast.success(response?.data?.message);
fileInputRef.current.value = null;
setPosting(false);
// toast.success(
// "File Uploaded, please validate processing in alplaprod 2.0"
// );
setPosting(false);
} catch (error) {
console.log(error);
toast.error("Upload failed");
}
setPosting(false);
};
const handleButtonClick = () => { // console.log(formData);
setPosting(true); toast.success("Import started.");
fileInputRef.current.click(); try {
}; const response = await axios.post(
return ( "/lst/old/api/logistics/postforecastin",
<div> formData,
<Button onClick={handleButtonClick} disabled={posting}> {
{props.name} headers: {
</Button> "Content-Type": "multipart/form-data",
<input },
type="file" }
accept=".xlsx, .xls, .xlsm" );
ref={fileInputRef} //console.log("Upload successful:", response.data);
style={{ display: "none" }} toast.success(response?.data?.message);
onChange={importOrders} fileInputRef.current.value = null;
/> setPosting(false);
</div> // toast.success(
); // "File Uploaded, please validate processing in alplaprod 2.0"
// );
setPosting(false);
} catch (error) {
console.log(error);
toast.error("Upload failed");
}
setPosting(false);
};
const handleButtonClick = () => {
setPosting(true);
fileInputRef.current.click();
};
return (
<div>
<Button onClick={handleButtonClick} disabled={posting}>
{props.name}
</Button>
<input
type="file"
accept=".xlsx, .xls, .xlsm"
ref={fileInputRef}
style={{ display: "none" }}
onChange={importOrders}
/>
</div>
);
} }

View File

@@ -3,63 +3,72 @@ import { useRef, useState } from "react";
import { toast } from "sonner"; import { toast } from "sonner";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { useAuth } from "@/lib/authClient"; import { useAuth } from "@/lib/authClient";
import { useNavigate, useRouterState } from "@tanstack/react-router";
export default function OrderImport(props: any) { export default function OrderImport(props: any) {
const fileInputRef: any = useRef(null); const fileInputRef: any = useRef(null);
const [posting, setPosting] = useState(false); const [posting, setPosting] = useState(false);
const { session } = useAuth(); const { session } = useAuth();
//const [fileType, setFileType] = useState(""); const navigate = useNavigate();
const importOrders = async (e: any) => { const router = useRouterState();
const file = e.target.files[0]; const currentPath = router.location.href;
if (!file) { //const [fileType, setFileType] = useState("");
toast.error("Missing file please try again"); const importOrders = async (e: any) => {
setPosting(false); if (!session || !session.user) {
return; toast.error("You are allowed to do this unless you are logged in");
} navigate({ to: "/login", search: { redirect: currentPath } });
return;
}
const file = e.target.files[0];
if (!file) {
toast.error("Missing file please try again");
setPosting(false);
return;
}
// create the form data with the correct fileType // create the form data with the correct fileType
const formData = new FormData(); const formData = new FormData();
formData.append("postOrders", e.target.files[0]); formData.append("postOrders", e.target.files[0]);
formData.append("fileType", props.fileType); // extra field formData.append("fileType", props.fileType); // extra field
formData.append("username", `${session?.user.username}`); formData.append("username", `${session?.user.username}`);
try { try {
const response = await axios.post( const response = await axios.post(
"/lst/old/api/logistics/postbulkorders", "/lst/old/api/logistics/postbulkorders",
formData, formData,
{ {
headers: { headers: {
"Content-Type": "multipart/form-data", "Content-Type": "multipart/form-data",
}, },
}, }
); );
//console.log("Upload successful:", response.data); //console.log("Upload successful:", response.data);
toast.success(response?.data?.message); toast.success(response?.data?.message);
fileInputRef.current.value = null; fileInputRef.current.value = null;
setPosting(false); setPosting(false);
} catch (error) { } catch (error) {
console.log(error); console.log(error);
toast.error("Upload failed"); toast.error("Upload failed");
} }
setPosting(false); setPosting(false);
}; };
const handleButtonClick = () => { const handleButtonClick = () => {
setPosting(true); setPosting(true);
fileInputRef.current.click(); fileInputRef.current.click();
}; };
return ( return (
<div> <div>
<Button onClick={handleButtonClick} disabled={posting}> <Button onClick={handleButtonClick} disabled={posting}>
{props.name} {props.name}
</Button> </Button>
<input <input
type="file" type="file"
accept=".xlsx, .xls, .xlsm" accept=".xlsx, .xls, .xlsm"
ref={fileInputRef} ref={fileInputRef}
style={{ display: "none" }} style={{ display: "none" }}
onChange={importOrders} onChange={importOrders}
/> />
</div> </div>
); );
} }

View File

@@ -0,0 +1 @@
ALTER TABLE "qualityRequest" ADD COLUMN "qualityDurationToInspect" integer;

View File

@@ -0,0 +1 @@
ALTER TABLE "qualityRequest" ADD COLUMN "returnDurationToInspect" integer;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -519,6 +519,20 @@
"when": 1760708711258, "when": 1760708711258,
"tag": "0073_bumpy_dust", "tag": "0073_bumpy_dust",
"breakpoints": true "breakpoints": true
},
{
"idx": 74,
"version": "7",
"when": 1762966327361,
"tag": "0074_overconfident_may_parker",
"breakpoints": true
},
{
"idx": 75,
"version": "7",
"when": 1762983466464,
"tag": "0075_tan_unicorn",
"breakpoints": true
} }
] ]
} }

View File

@@ -25,6 +25,8 @@ export const qualityRequest = pgTable(
warehouseMovedTo: text("warehouseMovedTo"), warehouseMovedTo: text("warehouseMovedTo"),
locationMovedTo: text("locationMovedTo"), locationMovedTo: text("locationMovedTo"),
durationToMove: integer("durationToMove"), durationToMove: integer("durationToMove"),
qualityDurationToInspect: integer("qualityDurationToInspect"),
returnDurationToInspect: integer("returnDurationToInspect"),
locationDropOff: text("locationDropOff"), locationDropOff: text("locationDropOff"),
palletStatus: integer("palletStatus"), palletStatus: integer("palletStatus"),
palletStatusText: text("palletStatusText"), palletStatusText: text("palletStatusText"),

View File

@@ -0,0 +1,88 @@
import {
addDays,
format,
formatISO,
isBefore,
parseISO,
startOfWeek,
} from "date-fns";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { materialPerDay } from "../../../sqlServer/querys/dataMart/materialPerDay.js";
/**
 * Coerce an arbitrary value (Date, "YYYY-MM-DD hh:mm:ss" string, or
 * timestamp/other) into a Date instance.
 */
function toDate(val: any) {
    // Dates pass through untouched.
    if (val instanceof Date) {
        return val;
    }
    // SQL-style "date space time" strings need a "T" separator for parseISO.
    return typeof val === "string"
        ? parseISO(val.replace(" ", "T"))
        : new Date(val);
}
/**
 * Fold per-day demand rows into per-material weekly totals.
 *
 * @param data - rows with MaterialHumanReadableId, CalDate and
 *               DailyMaterialDemand fields (as returned by the data mart query).
 * @returns flat list of { MaterialHumanReadableId, WeekStart, WeeklyDemand }
 *          where WeekStart is the ISO date of the week's Monday and
 *          WeeklyDemand is the summed demand formatted to two decimals.
 */
export function sumByMaterialAndWeek(data: any) {
    // material -> week-start ISO date -> summed demand
    const grouped: Record<string, Record<string, number>> = {};
    for (const r of data) {
        const mat = String(r.MaterialHumanReadableId);
        const d = toDate(r.CalDate);
        // Bucket by the Monday of the row's week (ISO weeks start on Monday).
        const week = formatISO(startOfWeek(d, { weekStartsOn: 1 }), {
            representation: "date",
        });
        grouped[mat] ??= {};
        grouped[mat][week] ??= 0;
        // Non-numeric demand values count as 0 rather than poisoning the sum.
        grouped[mat][week] += Number(r.DailyMaterialDemand) || 0;
    }
    const result = [];
    for (const [mat, weeks] of Object.entries(grouped)) {
        for (const [week, total] of Object.entries(weeks)) {
            result.push({
                MaterialHumanReadableId: mat,
                WeekStart: week,
                WeeklyDemand: total.toFixed(2),
            });
        }
    }
    return result;
}
/**
 * Fetch the daily material demand for the next 90 days from the data-mart
 * SQL server and aggregate it into per-material weekly totals.
 *
 * @returns { success, message, data? | error? } — data is the output of
 *          sumByMaterialAndWeek on success, and is never thrown; query and
 *          transport failures are reported through the result object.
 */
export default async function materialPerDayCheck() {
    // Query window: today through 90 days out, as yyyy-MM-dd strings.
    const startDate = format(new Date(), "yyyy-MM-dd");
    const endDate = format(addDays(new Date(), 90), "yyyy-MM-dd");
    const { data, error } = (await tryCatch(
        query(
            // replaceAll: substitute every occurrence of the placeholders,
            // not just the first, in case the template repeats them.
            materialPerDay
                .replaceAll("[startDate]", startDate)
                .replaceAll("[endDate]", endDate),
            "material check",
        ),
    )) as any;
    if (error) {
        return {
            success: false,
            message: "Error getting the material data",
            error,
        };
    }
    // The query helper reports soft failures via data.success.
    if (!data.success) {
        return {
            success: false,
            message: data.message,
            data: [],
        };
    }
    return {
        success: true,
        message: "material data",
        data: sumByMaterialAndWeek(data.data),
    };
}

View File

@@ -1,60 +1,60 @@
import { OpenAPIHono } from "@hono/zod-openapi"; import { OpenAPIHono } from "@hono/zod-openapi";
import sendemail from "./routes/sendMail.js";
import { tryCatch } from "../../globalUtils/tryCatch.js";
import { db } from "../../../database/dbclient.js"; import { db } from "../../../database/dbclient.js";
import { notifications } from "../../../database/schema/notifications.js"; import { notifications } from "../../../database/schema/notifications.js";
import { tryCatch } from "../../globalUtils/tryCatch.js";
import { createLog } from "../logger/logger.js"; import { createLog } from "../logger/logger.js";
import fifoIndex from "./routes/fifoIndex.js";
import notifyStats from "./routes/getActiveNotifications.js";
import notify from "./routes/getNotifications.js";
import tiTrigger from "./routes/manualTiggerTi.js";
import materialCheck from "./routes/materialPerDay.js";
import blocking from "./routes/qualityBlocking.js";
import sendemail from "./routes/sendMail.js";
import { note, notificationCreate } from "./utils/masterNotifications.js"; import { note, notificationCreate } from "./utils/masterNotifications.js";
import { startNotificationMonitor } from "./utils/processNotifications.js"; import { startNotificationMonitor } from "./utils/processNotifications.js";
import notifyStats from "./routes/getActiveNotifications.js";
import tiTrigger from "./routes/manualTiggerTi.js";
import blocking from "./routes/qualityBlocking.js";
import notify from "./routes/getNotifications.js";
import fifoIndex from "./routes/fifoIndex.js";
const app = new OpenAPIHono(); const app = new OpenAPIHono();
const routes = [ const routes = [
sendemail, sendemail,
notifyStats, notifyStats,
tiTrigger, tiTrigger,
blocking, blocking,
notify, notify,
fifoIndex, fifoIndex,
materialCheck,
] as const; ] as const;
const appRoutes = routes.forEach((route) => { const appRoutes = routes.forEach((route) => {
app.route("/notify", route); app.route("/notify", route);
}); });
app.all("/notify/*", (c) => { app.all("/notify/*", (c) => {
return c.json({ return c.json({
success: false, success: false,
message: "you have encounted a notication route that dose not exist.", message: "you have encounted a notication route that dose not exist.",
}); });
}); });
// check if the mastNotications is changed compared to the db and add if needed. // check if the mastNotications is changed compared to the db and add if needed.
const { data: notes, error: notesError } = await tryCatch( const { data: notes, error: notesError } = await tryCatch(
db.select().from(notifications) db.select().from(notifications),
); );
if (notesError) { if (notesError) {
createLog( createLog(
"error", "error",
"notify", "notify",
"notify", "notify",
`There was an error getting the notifications: ${JSON.stringify( `There was an error getting the notifications: ${JSON.stringify(
notesError notesError,
)}` )}`,
); );
} }
setTimeout(() => { setTimeout(() => {
notificationCreate(); notificationCreate();
startNotificationMonitor(); startNotificationMonitor();
}, 5 * 1000); }, 5 * 1000);
export default app; export default app;

View File

@@ -0,0 +1,37 @@
// Route: GET /notify/materialperday — returns the material demand per day,
// computed by the materialPerDay controller.
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import fifoIndexCheck from "../controller/notifications/fifoIndex.js"; // NOTE(review): unused in this file — confirm and remove.
import materialPerDayCheck from "../controller/notifications/materialPerDay.js";
const app = new OpenAPIHono({ strict: false });
app.openapi(
    createRoute({
        tags: ["notify"],
        summary: "Returns the material demand per day",
        method: "get",
        path: "/materialperday",
        //middleware: authMiddleware,
        responses: responses(),
    }),
    async (c) => {
        /**
         * Record the hit for API usage statistics.
         */
        apiHit(c, { endpoint: "/materialperday" });
        /**
         * Delegate to the controller, which aggregates the per-day
         * material demand, and relay its result envelope unchanged.
         */
        const checkedData = await materialPerDayCheck();
        return c.json({
            success: checkedData.success,
            message: checkedData.message,
            data: checkedData.data,
        });
    },
);
export default app;

View File

@@ -1,171 +1,180 @@
import { differenceInMinutes } from "date-fns";
import { eq, sql } from "drizzle-orm"; import { eq, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js"; import { db } from "../../../../database/dbclient.js";
import { qualityRequest } from "../../../../database/schema/qualityRequest.js"; import { qualityRequest } from "../../../../database/schema/qualityRequest.js";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js"; import { query } from "../../sqlServer/prodSqlServer.js";
import { qrequestQuery } from "../../sqlServer/querys/quality/request.js"; import { qrequestQuery } from "../../sqlServer/querys/quality/request.js";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
export const addNewPallet = async (data: any, user: string) => { export const addNewPallet = async (data: any) => {
/** /**
* Post new pallets * Post new pallets
*/ */
if (parseInt(data.runningNr) <= 0) { if (parseInt(data.runningNr) <= 0) {
return { return {
sucess: false, success: false,
message: "Please add a valid running number.", message: "Please add a valid running number.",
}; };
} }
const updateQuery = qrequestQuery.replaceAll( const updateQuery = qrequestQuery.replaceAll(
"[runningNumber]", "[runningNumber]",
data.runningNr data.runningNr,
); );
const { data: c, error: ce } = await tryCatch( const { data: c, error: ce } = await tryCatch(
db db
.select() .select()
.from(qualityRequest) .from(qualityRequest)
.where(eq(qualityRequest.runningNr, data.runningNr)) .where(eq(qualityRequest.runningNr, data.runningNr)),
); );
if (ce) { if (ce) {
return { return {
success: false, success: false,
message: "There was an error getting the quality request", message: "There was an error getting the quality request",
data: ce, data: ce,
}; };
} }
const palletData: any = c; const palletData: any = c;
// if the pallet exist then tell the user to check on it // if the pallet exist then tell the user to check on it
if ( const pStatus = [1, 4, 6];
(palletData && palletData[0]?.palletStatus === 1) || if (palletData && pStatus.includes(palletData[0]?.palletStatus)) {
palletData[0]?.palletStatus === 4 return {
) { success: false,
return { message: `Running number ${data.runningNr} is already pending or reactivated please follow up with the warehouse team on status to be moved.`,
success: false, };
message: `Running number ${data.runningNr} is already pending or reactivated please follow up with the warehouse team on status to be moved.`, }
};
}
if (palletData.length > 0) { // update the existing pallet if already in the system
try { if (palletData.length > 0) {
// get the pallet info from stock try {
const { data: pa, error: pe } = await tryCatch( // get the pallet info from stock
query(updateQuery, "quality request") const { data: pa, error: pe } = await tryCatch(
); query(updateQuery, "quality request"),
const p: any = pa; );
if (pe) { const p: any = pa ? pa.data : [];
return { if (pe) {
success: false, return {
message: "There was an error getting the pallet from stock", success: false,
data: pe, message: "There was an error getting the pallet from stock",
}; data: pe,
} };
const pData = { }
warehouseAtRequest: p[0].warehouseAtRequest, const pData = {
locationAtRequest: p[0].locationAtRequest, warehouseAtRequest: p[0].warehouseAtRequest,
warehouseMovedTo: null, locationAtRequest: p[0].locationAtRequest,
locationMovedTo: null, warehouseMovedTo: null,
palletStatus: 4, locationMovedTo: null,
durationToMove: 0, palletStatus: data.palletStatusText === "return" ? 6 : 4,
palletStatusText: "reactivated", //durationToMove: 0,
palletRequest: palletData[0].palletStatus + 1, palletStatusText:
upd_user: user, data.palletStatusText === "return" ? "return" : "reactivated",
upd_date: new Date(timeZoneFix()), qualityDurationToInspect:
}; data.palletStatusText === "return"
? differenceInMinutes(new Date(Date.now()), p[0].lastMove)
: 0,
locationDropOff:
data.palletStatusText === "return" ? "Return to warhouse" : "",
palletRequest: palletData[0].palletStatus + 1,
upd_user: data.user,
upd_date: sql`NOW()`,
};
const { data: u, error } = await tryCatch( const { data: u, error } = await tryCatch(
db db
.update(qualityRequest) .update(qualityRequest)
.set(pData) .set(pData)
.where(eq(qualityRequest.runningNr, data.runningNr)) .where(eq(qualityRequest.runningNr, data.runningNr)),
); );
if (error) { if (error) {
return { return {
success: false, success: false,
message: `Running number: ${data.runningNr} encountered and error reactivated.`, message: `Running number: ${data.runningNr} encountered and error reactivated.`,
data: error, data: error,
}; };
} }
if (data) { if (data) {
return { return {
success: true, success: true,
message: `Running number: ${data.runningNr} was just reactivated.`, message: `Running number: ${data.runningNr} was just reactivated.`,
data: u, data: u,
}; };
} }
} catch (error) { } catch (error) {
return { console.log(error);
success: false, return {
message: success: false,
"There was an error updating the pallet in quality request", message: "There was an error updating the pallet in quality request",
data: error, data: error,
}; };
} }
} }
// add new pallet // add new pallet
try { try {
const { data: px, error: pe } = await tryCatch( const { data: px, error: pe } = await tryCatch(
query(updateQuery, "quality request") query(updateQuery, "quality request"),
); );
const p: any = px;
if (p.length === 0) {
return {
success: false,
message: `Running Number ${data.runningNr} dose not exist in stock.`,
};
}
if (pe) { const p: any = px ? px.data : [];
return { if (p.length === 0) {
success: false, return {
message: "There was an error getting the pallet from stock", success: false,
data: pe, message: `Running Number ${data.runningNr} dose not exist in stock, please check the running number and try again.`,
}; };
} }
const nData = { if (pe) {
article: p[0].article, return {
description: p[0].description, success: false,
runningNr: p[0].runningNr, message: "There was an error getting the pallet from stock",
lotNr: p[0].lotNr, data: pe,
warehouseAtRequest: p[0].warehouseAtRequest, };
locationAtRequest: p[0].locationAtRequest, }
locationDropOff: data.moveTo, console.log(p);
palletStatus: 1, const nData = {
palletStatusText: "pending", article: p[0].article,
palletRequest: 1, description: p[0].description,
add_user: user, runningNr: p[0].runningNr,
upd_user: user, lotNr: p[0].lotNr,
}; warehouseAtRequest: p[0].warehouseAtRequest,
locationAtRequest: p[0].locationAtRequest,
locationDropOff: data.moveTo,
palletStatus: 1,
palletStatusText: "pending",
palletRequest: 1,
add_user: data.user,
upd_user: data.user,
};
const { data: u, error } = await tryCatch( const { data: u, error } = await tryCatch(
db.insert(qualityRequest).values(nData) db.insert(qualityRequest).values(nData).onConflictDoNothing(),
); );
if (error) { if (error) {
return { return {
success: false, success: false,
message: `Running number: ${data.runningNr} encountered and error reactivated.`, message: `Running number: ${data.runningNr} encountered and error reactivated.`,
data: error, data: error,
}; };
} }
if (data) { if (data) {
return { return {
success: true, success: true,
message: `Running number: ${data.runningNr} was just added.`, message: `Running number: ${data.runningNr} was just added.`,
data: u, data: u,
}; };
} }
} catch (error) { } catch (error) {
return { console.log(error);
success: false, return {
message: "There was an error adding the pallet in quality request", success: false,
data: error, message: "There was an error adding the pallet in quality request",
}; data: error,
} };
}
}; };

View File

@@ -1,110 +1,127 @@
import { eq, inArray } from "drizzle-orm"; import { differenceInMinutes } from "date-fns";
import { eq, inArray, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js"; import { db } from "../../../../database/dbclient.js";
import { qualityRequest } from "../../../../database/schema/qualityRequest.js"; import { qualityRequest } from "../../../../database/schema/qualityRequest.js";
import { delay } from "../../../globalUtils/delay.js";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js"; import { createLog } from "../../logger/logger.js";
import { qrequestQuery } from "../../sqlServer/querys/quality/request.js";
import { query } from "../../sqlServer/prodSqlServer.js"; import { query } from "../../sqlServer/prodSqlServer.js";
import { differenceInMinutes } from "date-fns"; import { qrequestQuery } from "../../sqlServer/querys/quality/request.js";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
export const qualityCycle = async () => { export const qualityCycle = async () => {
/** /**
* Cycles the pallets in the quality request to see whats been moved or changed. * Cycles the pallets in the quality request to see whats been moved or changed.
*/ */
const warehouse = [1, 4, 5];
// pallet request check interval 5min check to start // pallet request check interval 5min check to start
//setInterval(async () => { setInterval(
// create the date stuff async () => {
const currentTime = new Date(Date.now()); // create the date stuff
const currentTime = new Date(Date.now());
// pull in all current pallets from our db // pull in all current pallets from our db
const { data, error } = await tryCatch( const { data, error } = await tryCatch(
db db
.select() .select()
.from(qualityRequest) .from(qualityRequest)
.where(inArray(qualityRequest.palletStatus, [1, 4, 5])) .where(inArray(qualityRequest.palletStatus, [1, 4, 5, 6])),
); );
if (error) { if (error) {
createLog( createLog(
"error", "error",
"lst", "lst",
"quality", "quality",
`There was an error getting quality request data: ${error}` `There was an error getting quality request data: ${error}`,
); );
return { return {
success: false, success: false,
message: "There was an error getting quality request data", message: "There was an error getting quality request data",
}; };
} }
const lstQData: any = data; const lstQData: any = data;
// get the pallets that currentStat is moved // get the pallets that currentStat is moved
// const res = await runQuery(palletMoveCheck, "palletCheck"); // const res = await runQuery(palletMoveCheck, "palletCheck");
if (lstQData.length != 0) { if (lstQData.length != 0) {
for (let i = 0; i < lstQData.length; i++) { for (let i = 0; i < lstQData.length; i++) {
// run the pallet query we will compare the data. // run the pallet query we will compare the data.
// console.log(lstQData[i]); // console.log(lstQData[i]);
//update query with plant token //update query with plant token
// change the update the pallet number // update the the pallet number
const qPalletNumber = qrequestQuery.replaceAll( const qPalletNumber = qrequestQuery.replaceAll(
"[runningNumber]", "[runningNumber]",
lstQData[i].runningNr lstQData[i].runningNr,
); );
const queryData: any = await query( const queryData: any = await query(
qPalletNumber, qPalletNumber,
"Quality update check" "Quality update check",
); );
let prodData: any = let prodData: any =
queryData?.data.length === 0 ? [] : queryData.data; queryData?.data.length === 0 ? [] : queryData.data;
if ( if (
lstQData[i]?.locationAtRequest != prodData[0]?.locationAtRequest lstQData[i]?.locationAtRequest != prodData[0]?.locationAtRequest
) { ) {
// time to do the pallet update stuff // time to do the pallet update stuff
const qDataPost = { const qDataPost = {
warehouseMovedTo: prodData[0]?.warehouseAtRequest, warehouseMovedTo: prodData[0]?.warehouseAtRequest,
locationMovedTo: prodData[0]?.locationAtRequest, locationMovedTo: prodData[0]?.locationAtRequest,
durationToMove: differenceInMinutes( // how ling did it take the warhouse to originally move the pallet
timeZoneFix(), durationToMove: warehouse.includes(lstQData[i].palletStatus)
lstQData[i].upd_date ? differenceInMinutes(
), new Date(Date.now()),
palletStatus: 2, lstQData[i].upd_date,
palletStatusText: "moved", )
upd_date: new Date(timeZoneFix()), : lstQData[i].durationToMove,
upd_user: "LST_System", // how long did it take warehouse to move the pallet back agian
}; returnDurationToInspect:
lstQData[i].palletStatus === 7
? differenceInMinutes(
new Date(Date.now()),
lstQData[i].upd_date,
)
: lstQData[i].qualityDurationToInspect,
palletStatus: 2,
palletStatusText: "moved",
upd_date: sql`NOW()`,
upd_user: "LST_System",
};
const updatePallet = await db const updatePallet = await db
.update(qualityRequest) .update(qualityRequest)
.set(qDataPost) .set(qDataPost)
.where(eq(qualityRequest.runningNr, lstQData[i].runningNr)); .where(eq(qualityRequest.runningNr, lstQData[i].runningNr));
createLog( createLog(
"info", "info",
"lst", "lst",
"quality", "quality",
`Pallet ${lstQData[i].runningNr} was updated` `Pallet ${lstQData[i].runningNr} was updated`,
); );
} else { } else {
createLog( createLog(
"debug", "info",
"lst", "lst",
"quality", "quality",
`Pallet ${ `Pallet ${
lstQData[i].runningNr lstQData[i].runningNr
} has not been moved yet it has been pending for ${differenceInMinutes( } has not been moved yet it has been pending for ${differenceInMinutes(
timeZoneFix(), new Date(Date.now()),
lstQData[i].upd_date lstQData[i].upd_date,
)} min(s)` )} min(s)`,
); );
} }
} }
} else { await delay(150);
createLog("debug", "lst", "quality", "nothing to update"); } else {
} createLog("info", "lst", "quality", "nothing to update");
//}, 5 * 60 * 1000); // every 5 min }
},
5 * 60 * 1000,
); // every 5 min
}; };

View File

@@ -5,35 +5,37 @@ import postReq from "./route/postNewRequest.js";
// pallet status data. // pallet status data.
export const statusOptions = [ export const statusOptions = [
{ name: "pending", uid: "1" }, { name: "pending", uid: "1" },
{ name: "moved", uid: "2" }, { name: "moved", uid: "2" },
{ name: "removed", uid: "3" }, { name: "removed", uid: "3" },
{ name: "reactivated", uid: "4" }, { name: "reactivated", uid: "4" },
{ name: "canceled", uid: "5" }, { name: "canceled", uid: "5" },
{ name: "return", uid: "6" },
{ name: "readyToReturn", uid: "7" },
]; ];
const app = new OpenAPIHono(); const app = new OpenAPIHono();
const routes = [request, postReq] as const; const routes = [request, postReq] as const;
const appRoutes = routes.forEach((route) => { const appRoutes = routes.forEach((route) => {
app.route("/quality", route); app.route("/quality", route);
}); });
app.all("/quality/*", (c) => { app.all("/quality/*", (c) => {
return c.json({ return c.json({
success: false, success: false,
message: "You have encounters a quality route that dose not exist.", message: "You have encounters a quality route that dose not exist.",
}); });
}); });
/** /**
* Initial and run the cycle up for checking the pallet moves for quality * Initial and run the cycle up for checking the pallet moves for quality
*/ */
setTimeout(() => { setTimeout(() => {
qualityCycle(); qualityCycle();
}, 1000 * 5); }, 1000 * 5);
setInterval(() => { setInterval(() => {
qualityCycle(); qualityCycle();
}, 1000 * 60); }, 1000 * 60);
export default app; export default app;

View File

@@ -1,74 +1,72 @@
// an external way to creating logs // an external way to creating logs
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi"; import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { verify } from "hono/jwt";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js"; import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { getRequest } from "../controller/getRequests.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { authMiddleware } from "../../auth/middleware/authMiddleware.js"; import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { addNewPallet } from "../controller/addNewPallet.js"; import { addNewPallet } from "../controller/addNewPallet.js";
import { verify } from "hono/jwt"; import { getRequest } from "../controller/getRequests.js";
import { apiHit } from "../../../globalUtils/apiHits.js";
const app = new OpenAPIHono({ strict: false }); const app = new OpenAPIHono({ strict: false });
const Body = z.object({ const Body = z.object({
runningNr: z.number().openapi({ example: 1528 }), runningNr: z.number().openapi({ example: 1528 }),
moveTo: z.string().openapi({ example: "rework" }), moveTo: z.string().optional().openapi({ example: "rework" }),
}); });
app.openapi( app.openapi(
createRoute({ createRoute({
tags: ["quality"], tags: ["quality"],
summary: "Returns all pallets requested", summary: "Returns all pallets requested",
method: "post", method: "post",
path: "/newrequest", path: "/newrequest",
middleware: authMiddleware, middleware: authMiddleware,
request: { request: {
body: { body: {
content: { content: {
"application/json": { schema: Body }, "application/json": { schema: Body },
}, },
}, },
}, },
responses: responses(), responses: responses(),
}), }),
async (c) => { async (c) => {
const authHeader = c.req.header("Authorization"); // const authHeader = c.req.header("Authorization");
const token = authHeader?.split("Bearer ")[1] || ""; // const token = authHeader?.split("Bearer ")[1] || "";
const payload = await verify(token, process.env.JWT_SECRET!); // const payload = await verify(token, process.env.JWT_SECRET!);
const user: any = payload.user; // const user: any = payload.user;
const { data: b, error: e } = await tryCatch(c.req.json()); const { data: b, error: e } = await tryCatch(c.req.json());
apiHit(c, { endpoint: "/newrequest", lastBody: b }); apiHit(c, { endpoint: "/newrequest", lastBody: b });
if (e) { if (e) {
return c.json({ return c.json({
success: false, success: false,
message: "Missing Data", message: "Missing Data",
}); });
} }
const body: any = b; const body: any = b;
// console.log(body); // console.log(body);
// if (!body.runningNr) { // if (!body.runningNr) {
// return c.json({ // return c.json({
// success: false, // success: false,
// message: "Missing mandatory data.", // message: "Missing mandatory data.",
// }); // });
// } // }
const { data, error } = await tryCatch( const { data, error } = await tryCatch(addNewPallet(body));
addNewPallet(body, user?.username)
);
if (error) { if (error) {
return c.json({ return c.json({
success: false, success: false,
message: "There was an error adding the new pallet", message: "There was an error adding the new pallet",
}); });
} }
return c.json({ return c.json({
success: data?.success, success: data?.success,
message: data?.message, message: data?.message,
data: data?.data, data: data?.data,
}); });
} },
); );
export default app; export default app;

View File

@@ -0,0 +1,115 @@
/**
 * T-SQL query template: material demand per day.
 *
 * Splits each production lot's material demand across shift-based 24-hour
 * windows (@ShiftStartHour = 6, i.e. 06:00 → next-day 06:00) for every
 * calendar day between the two placeholder dates, weighting each day by its
 * share of the lot's total overlapping seconds (NormalizedFraction), and
 * returns DailyMaterialDemand = NormalizedFraction * LotMaterialTotalDemand.
 *
 * The caller must substitute the "[startDate]" and "[endDate]" placeholders
 * before execution. NOTE(review): this is plain string replacement, not a
 * bind parameter — only safe for trusted/validated date input; confirm the
 * values are never user-controlled.
 *
 * NOTE(review): DATEADD(SECOND, -0.0000001, ...) truncates its number
 * argument to 0, so the "just before the next day start" adjustment is a
 * no-op — confirm whether e.g. DATEADD(NANOSECOND, -100, ...) was intended.
 *
 * OPTION (MAXRECURSION 0) lifts the recursive-CTE limit so the Calendar CTE
 * can cover arbitrarily long date ranges.
 */
export const materialPerDay = `
use [test3_AlplaPROD2.0_Read]
DECLARE @ShiftStartHour INT = 6
declare @startDate nvarchar(max) = '[startDate]'
declare @endDate nvarchar(max) = '[endDate]'
;with Calendar as (
select cast(@startDate as date) CalDate
union all
select dateadd(day,1,CalDate)
from Calendar
where CalDate < @endDate
),
DailySplit AS (
SELECT
-- Lot fields
l.Id AS ProductionLotId,
l.ProductionLotHumanReadableId,
l.ArticleHumanReadableId,
l.ArticleDescription,
l.LocationId,
l.MachineHumanReadableId,
l.MachineDescription,
l.StartDate,
l.FinishDate,
l.ProductionCustomerDescription,
l.ProductionCustomerHumanReadableId,
l.PlannedQuantity,
l.PlannedLoadingUnit,
l.Cavity,
l.Utilisation,
l.TotalMaterialDemand AS LotTotalMaterialDemand,
-- Material fields
m.MaterialHumanReadableId,
m.MaterialDescription,
m.TotalDemand AS LotMaterialTotalDemand,
c.CalDate,
DATEDIFF(SECOND,l.StartDate,l.FinishDate) AS LotDurationSec,
-- build shiftbased 24hour window (e.g. 06:00 → next day06:00)
CASE
WHEN l.StartDate > DATEADD(HOUR,@ShiftStartHour,CAST(c.CalDate AS DATETIME2(7)))
THEN l.StartDate
ELSE DATEADD(HOUR,@ShiftStartHour,CAST(c.CalDate AS DATETIME2(7)))
END AS DayStart,
CASE
WHEN l.FinishDate < DATEADD(SECOND,-0.0000001,
DATEADD(HOUR,@ShiftStartHour,
DATEADD(DAY,1,CAST(c.CalDate AS DATETIME2(7)))))
THEN l.FinishDate
ELSE DATEADD(SECOND,-0.0000001,
DATEADD(HOUR,@ShiftStartHour,
DATEADD(DAY,1,CAST(c.CalDate AS DATETIME2(7)))))
END AS DayEnd
FROM [issueMaterial].[ProductionLot] (nolock) AS l
LEFT JOIN [issueMaterial].[MaterialDemand] (nolock) AS m
ON m.ProductionLotId = l.Id
CROSS JOIN Calendar AS c
WHERE DATEADD(HOUR,@ShiftStartHour,CAST(c.CalDate AS DATETIME2))
< l.FinishDate
AND DATEADD(HOUR,@ShiftStartHour+24,CAST(c.CalDate AS DATETIME2))
> l.StartDate
--and l.[ProductionLotHumanReadableId] = 26364
),
Fraction AS (
SELECT
ds.*,
DATEDIFF(SECOND,ds.DayStart,ds.DayEnd) AS OverlapSec
FROM DailySplit ds
),
Normalized AS (
SELECT
f.*,
f.OverlapSec * 1.0 /
NULLIF(SUM(f.OverlapSec) OVER
(PARTITION BY f.ProductionLotId, f.MaterialHumanReadableId),0)
AS NormalizedFraction
FROM Fraction f
)
SELECT
n.ProductionLotId,
n.ProductionLotHumanReadableId,
n.ArticleHumanReadableId,
n.ArticleDescription,
n.LocationId,
n.MachineHumanReadableId,
n.MachineDescription,
n.StartDate,
n.FinishDate,
n.ProductionCustomerDescription,
n.ProductionCustomerHumanReadableId,
n.PlannedQuantity,
n.PlannedLoadingUnit,
n.Cavity,
n.Utilisation,
n.LotTotalMaterialDemand,
n.MaterialHumanReadableId,
n.MaterialDescription,
n.LotMaterialTotalDemand,
n.CalDate,
n.NormalizedFraction * n.LotMaterialTotalDemand AS DailyMaterialDemand
FROM Normalized n
ORDER BY
n.MaterialHumanReadableId,
n.CalDate,
n.ProductionLotHumanReadableId
OPTION (MAXRECURSION 0);
`;