Compare commits

...

5 Commits

32 changed files with 5754 additions and 620 deletions

2
.gitignore vendored
View File

@@ -14,7 +14,7 @@ controllerBuilds
lstV2/frontend/.tanstack
mobileLst
keys
# Logs
logs
*.log

View File

@@ -1,5 +1,6 @@
dist
frontend/dist
mobileLst/dist
lstDocs/build
migrations
Dockerfile

14
.vscode/launch.json vendored
View File

@@ -1,14 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Attach to packager",
"cwd": "${workspaceFolder}",
"type": "reactnative",
"request": "attach"
}
]
}

View File

@@ -1,48 +0,0 @@
{
"editor.defaultFormatter": "esbenp.prettier-vscode",
"workbench.colorTheme": "Default Dark+",
"prettier.tabWidth": 4,
"terminal.integrated.env.windows": {},
"editor.formatOnSave": true,
"[javascript]": {
"editor.formatOnSave": true
},
"[javascriptreact]": {
"editor.formatOnSave": true
},
"[typescript]": {
"editor.formatOnSave": true
},
"[typescriptreact]": {
"editor.formatOnSave": true
},
"[json]": {
"editor.formatOnSave": true
},
"[graphql]": {
"editor.formatOnSave": true
},
"[handlebars]": {
"editor.formatOnSave": true
},
"[go]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "golang.go"
},
"[powershell]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "ms-vscode.powershell" // requires PowerShell extension
},
"[bat]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format" // supports .sh, .bat, .cmd
},
"[cmd]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format"
},
// Optional: Configure goimports instead of gofmt
"go.formatTool": "goimports",
"cSpell.words": ["alpla", "alplamart", "alplaprod", "ppoo"]
}

68
.vscode/settings.json vendored
View File

@@ -1,19 +1,59 @@
{
"editor.defaultFormatter": "biomejs.biome",
"workbench.colorTheme": "Default Dark+",
"terminal.integrated.env.windows": {},
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"editor.defaultFormatter": "biomejs.biome",
"workbench.colorTheme": "Default Dark+",
"terminal.integrated.env.windows": {},
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll.biome": "explicit",
"source.organizeImports.biome": "explicit"
},
"cSpell.words": [
"acitve",
"alpla",
"alplamart",
"alplaprod",
"intiallally",
"ppoo",
"prodlabels"
]
"[javascript]": {
"editor.formatOnSave": true
},
"[javascriptreact]": {
"editor.formatOnSave": true
},
"[typescript]": {
"editor.formatOnSave": true
},
"[typescriptreact]": {
"editor.formatOnSave": true
},
"[json]": {
"editor.formatOnSave": true
},
"[graphql]": {
"editor.formatOnSave": true
},
"[handlebars]": {
"editor.formatOnSave": true
},
"[go]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "golang.go"
},
"[powershell]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "ms-vscode.powershell" // requires PowerShell extension
},
"[bat]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format" // supports .sh, .bat, .cmd
},
"[cmd]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format"
},
// Optional: Configure goimports instead of gofmt
"go.formatTool": "goimports",
"cSpell.words": [
"acitve",
"alpla",
"alplamart",
"alplaprod",
"intiallally",
"ppoo",
"prodlabels"
]
}

View File

@@ -5,13 +5,13 @@ meta {
}
get {
url: {{url}}/lst/old/api/eom/histinv?month=2025-11-01
url: {{url}}/lst/old/api/eom/histinv?month=2025/11/1
body: none
auth: inherit
}
params:query {
month: 2025-11-01
month: 2025/11/1
}
settings {

View File

@@ -0,0 +1,16 @@
meta {
name: materialPerDay
type: http
seq: 2
}
get {
url: {{urlv2}}/api/notify/materialperday
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,25 @@
meta {
name: Add pallet
type: http
seq: 2
}
post {
url: {{url}}/lst/old/api/quality/newrequest
body: json
auth: inherit
}
body:json {
{
"username": "matthes01",
"runningNr": 618302,
"palletStatusText":"return" // returned will be the only allowed key
//"moveTo": "hold area" //hold area, rework, inspection
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,16 @@
meta {
name: Get Pallets
type: http
seq: 1
}
get {
url: {{url}}/lst/old/api/quality/getrequest
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: Quality
seq: 7
}
auth {
mode: inherit
}

View File

@@ -5,11 +5,16 @@ meta {
}
get {
url:
url: {{url}}/lst/api/user/me
body: none
auth: inherit
auth: bearer
}
auth:bearer {
token: jpHHbLNGJRpUMvfrVOYmhbJL2Ux0arse
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,20 @@
meta {
name: otacheck
type: http
seq: 3
}
get {
url: http://10.193.0.56:4000/api/mobile/updates
body: none
auth: inherit
}
headers {
expo-runtime-version: 1.0.0
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -3,6 +3,7 @@ process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
import { toNodeHandler } from "better-auth/node";
import cors from "cors";
import express from "express";
import fs from "fs";
import { createServer } from "http";
import { createProxyMiddleware, fixRequestBody } from "http-proxy-middleware";
import morgan from "morgan";
@@ -13,6 +14,7 @@ import swaggerUi from "swagger-ui-express";
import { fileURLToPath } from "url";
import { userMigrate } from "./src/internal/auth/controller/userMigrate.js";
import { schedulerManager } from "./src/internal/logistics/controller/schedulerManager.js";
import { setupMobileRoutes } from "./src/internal/mobile/route.js";
import { printers } from "./src/internal/ocp/printers/printers.js";
import { setupRoutes } from "./src/internal/routerHandler/routeHandler.js";
import { baseModules } from "./src/internal/system/controller/modules/baseModules.js";
@@ -156,12 +158,20 @@ const main = async () => {
},
methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
credentials: true,
exposedHeaders: ["set-cookie"],
exposedHeaders: [
"set-cookie",
"expo-protocol-version",
"expo-sfv-version",
],
allowedHeaders: [
"Content-Type",
"Authorization",
"X-Requested-With",
"XMLHttpRequest",
"expo-runtime-version",
"expo-platform",
"expo-channel-name",
"*",
],
}),
);
@@ -188,22 +198,6 @@ const main = async () => {
res.sendFile(join(__dirname, "../lstDocs/build/index.html"));
});
// app ota updates
app.use(
basePath + "/api/mobile/updates",
express.static(join(__dirname, "../mobileLst/dist"), {
setHeaders(res) {
// OTA runtime needs to fetch these from the device
console.log("OTA check called");
res.setHeader("Access-Control-Allow-Origin", "*");
},
}),
);
app.get(basePath + "/api/mobile", (_, res) =>
res.status(200).json({ message: "LST OTA server is up." }),
);
// server setup
const server = createServer(app);
@@ -223,7 +217,7 @@ const main = async () => {
// start up the v1listener
v1Listener();
addListeners();
userMigrate();
//userMigrate();
// some temp fixes
manualFixes();

View File

@@ -0,0 +1,211 @@
import type { Express, Request, Response } from "express";
import express, { Router } from "express";
import { readdirSync, readFileSync, statSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import crypto from "crypto";
import fs from "fs";
/**
 * Mount the Expo OTA (over-the-air) update endpoints on the Express app.
 *
 * Routes registered (all under `basePath`):
 *   GET  /api/mobile/updates  -> expo-updates protocol manifest (JSON)
 *   USE  /api/mobile/updates  -> static files for the exported bundle/assets
 *   GET  /api/mobile          -> liveness probe
 *
 * @param app      Express application to register routes on.
 * @param basePath URL prefix shared by all LST routes (e.g. "/lst").
 */
export const setupMobileRoutes = (app: Express, basePath: string) => {
    const __filename = fileURLToPath(import.meta.url);
    const __dirname = dirname(__filename);
    // Root of the exported Expo bundle (output of `expo export` in mobileLst).
    const distPath = join(__dirname, "../../../../mobileLst/dist");

    /**
     * Build the `assets` section of the Expo update manifest: one descriptor
     * (sha256 hash, key, extension, content type, absolute URL) per file in
     * `dist/assets`. Returns an empty array when the directory is missing or
     * unreadable.
     * NOTE(review): every asset is re-read and re-hashed synchronously on each
     * request — consider caching if the asset set grows.
     */
    function generateAssetManifest(baseUrl: string) {
        const assets: any[] = [];
        const assetsDir = join(distPath, "assets");
        try {
            if (!fs.existsSync(assetsDir)) {
                return assets;
            }
            const files = readdirSync(assetsDir);
            files.forEach((file) => {
                const filePath = join(assetsDir, file);
                const stats = statSync(filePath);
                if (stats.isFile()) {
                    const content = readFileSync(filePath);
                    const hash = crypto
                        .createHash("sha256")
                        .update(content)
                        .digest("hex");
                    assets.push({
                        hash: hash,
                        key: file,
                        fileExtension: `.${file.split(".").pop()}`,
                        contentType: getContentType(file),
                        url: `${baseUrl}/assets/${file}`,
                    });
                }
            });
        } catch (err) {
            console.log("Error reading assets:", err);
        }
        return assets;
    }

    /**
     * Map a file name's extension to the MIME type reported in the manifest.
     * Unknown extensions fall back to "application/octet-stream".
     */
    function getContentType(filename: string): string {
        const ext = filename.split(".").pop()?.toLowerCase();
        const contentTypes: { [key: string]: string } = {
            hbc: "application/javascript",
            bundle: "application/javascript",
            js: "application/javascript",
            json: "application/json",
            png: "image/png",
            jpg: "image/jpeg",
            jpeg: "image/jpeg",
            gif: "image/gif",
            ttf: "font/ttf",
            otf: "font/otf",
            woff: "font/woff",
            woff2: "font/woff2",
        };
        return contentTypes[ext || ""] || "application/octet-stream";
    }

    // Expo OTA manifest endpoint (expo-updates protocol, Expo SDK 50+).
    app.get(basePath + "/api/mobile/updates", (req, res) => {
        console.log("=== OTA Update Request ===");
        console.log("Headers:", JSON.stringify(req.headers, null, 2));
        const runtimeVersion = req.headers["expo-runtime-version"];
        // NOTE(review): the platform header is read but the bundle path below
        // is hard-coded to android — confirm iOS is intentionally unsupported.
        const platform = req.headers["expo-platform"] || "android";
        const expectedRuntimeVersion = "1.0.0";
        // Only serve updates built for the exact runtime the client reports;
        // a mismatch means the client needs a new store build, not an OTA.
        if (runtimeVersion !== expectedRuntimeVersion) {
            console.log(
                `Runtime mismatch: got ${runtimeVersion}, expected ${expectedRuntimeVersion}`
            );
            return res.status(404).json({
                error: "No update available for this runtime version",
                requestedVersion: runtimeVersion,
                availableVersion: expectedRuntimeVersion,
            });
        }
        try {
            // NOTE(review): protocol is hard-coded to http; asset URLs will be
            // wrong behind an https proxy — confirm for production domains.
            const host = req.get('host');
            const protocol = 'http';
            const baseUrl = `${protocol}://${host}/api/mobile/updates`;
            // Locate the Hermes bytecode bundle produced by `expo export`.
            const bundleDir = join(distPath, "_expo/static/js/android");
            if (!fs.existsSync(bundleDir)) {
                console.error("Bundle directory does not exist:", bundleDir);
                return res
                    .status(500)
                    .json({ error: "Bundle directory not found" });
            }
            const bundleFiles = readdirSync(bundleDir);
            console.log("Available bundle files:", bundleFiles);
            const bundleFile = bundleFiles.find((f) => f.endsWith(".hbc"));
            if (!bundleFile) {
                console.error("No .hbc file found in:", bundleDir);
                return res
                    .status(500)
                    .json({ error: "Hermes bundle (.hbc) not found" });
            }
            console.log("Using bundle file:", bundleFile);
            const bundlePath = join(bundleDir, bundleFile);
            const bundleContent = readFileSync(bundlePath);
            // sha256 of the launch asset is required by the update protocol.
            const bundleHash = crypto
                .createHash("sha256")
                .update(bundleContent)
                .digest("hex");
            const updateId = crypto.randomUUID();
            const createdAt = new Date().toISOString();
            // Manifest in the new expo-updates format (Expo SDK 50+).
            const manifest = {
                id: updateId,
                createdAt: createdAt,
                runtimeVersion: expectedRuntimeVersion,
                launchAsset: {
                    hash: bundleHash,
                    key: bundleFile,
                    contentType: "application/javascript",
                    fileExtension: ".hbc",
                    url: `${baseUrl}/_expo/static/js/android/${bundleFile}`,
                },
                assets: generateAssetManifest(baseUrl),
                metadata: {},
                extra: {
                    expoClient: {
                        name: "LSTScanner",
                        slug: "lst-scanner-app",
                        version: "1.0.0",
                        runtimeVersion: expectedRuntimeVersion,
                    },
                },
            };
            console.log(
                "Returning manifest:",
                JSON.stringify(manifest, null, 2)
            );
            res.setHeader("Content-Type", "application/json");
            res.setHeader("Access-Control-Allow-Origin", "*");
            // Protocol headers the expo-updates client expects on the response.
            res.setHeader("expo-protocol-version", "1");
            res.setHeader("expo-sfv-version", "0");
            res.json(manifest);
        } catch (error: any) {
            console.error("Error generating manifest:", error);
            res.status(500).json({
                error: "Failed to generate manifest",
                details: error.message,
                stack: error.stack,
            });
        }
    });

    // Serve the exported bundle and asset files referenced by the manifest.
    app.use(
        basePath + "/api/mobile/updates",
        express.static(distPath, {
            setHeaders(res, path) {
                // The OTA runtime fetches these directly from the device.
                res.setHeader("Access-Control-Allow-Origin", "*");
                res.setHeader("Cache-Control", "public, max-age=31536000");
                if (path.endsWith(".hbc")) {
                    res.setHeader("Content-Type", "application/javascript");
                }
            },
        })
    );

    // Simple liveness probe for the OTA server.
    app.get(basePath + "/api/mobile", (_, res) =>
        res.status(200).json({ message: "LST OTA server is up." })
    );
};

View File

@@ -4,22 +4,24 @@ import { setupAuthRoutes } from "../auth/routes/routes.js";
import { setupForkliftRoutes } from "../forklifts/routes/routes.js";
import { setupLogisticsRoutes } from "../logistics/routes.js";
import { setupSystemRoutes } from "../system/routes.js";
import { setupMobileRoutes } from "../mobile/route.js";
export const setupRoutes = (app: Express, basePath: string) => {
// all routes
setupAuthRoutes(app, basePath);
setupAdminRoutes(app, basePath);
setupSystemRoutes(app, basePath);
setupLogisticsRoutes(app, basePath);
setupForkliftRoutes(app, basePath);
// all routes
setupAuthRoutes(app, basePath);
setupAdminRoutes(app, basePath);
setupSystemRoutes(app, basePath);
setupLogisticsRoutes(app, basePath);
setupForkliftRoutes(app, basePath);
setupMobileRoutes(app, basePath);
// always try to go to the app weather we are in dev or in production.
app.get(basePath + "/", (req: Request, res: Response) => {
res.redirect(basePath + "/app");
});
// always try to go to the app weather we are in dev or in production.
app.get(basePath + "/", (req: Request, res: Response) => {
res.redirect(basePath + "/app");
});
// Fallback 404 handler
app.use((req: Request, res: Response) => {
res.status(404).json({ error: "Not Found" });
});
// Fallback 404 handler
app.use((req: Request, res: Response) => {
res.status(404).json({ error: "Not Found" });
});
};

View File

@@ -3,53 +3,72 @@ import ForecastImport from "./ForecastImport";
import OrderImport from "./OrderImport";
export default function DMButtons() {
const { settings } = useSettingStore();
const testServers = ["test1", "test2", "test3"];
const plantToken = settings.filter((n) => n.name === "plantToken");
//console.log(plantToken);
return (
<div className="flex flex-row-reverse gap-1">
<OrderImport fileType={"macro"} name={"Macro Import"} />
{/* dev and testserver sees all */}
{testServers.includes(plantToken[0]?.value) && (
<div className="flex flex-row gap-2">
<OrderImport fileType={"abbott"} name={"Abbott truck list"} />
<OrderImport fileType={"energizer"} name={"Energizer Truck List"} />
<ForecastImport fileType={"loreal"} name={"VMI Import"} />
<ForecastImport fileType={"pg"} name={"P&G"} />
<ForecastImport fileType={"energizer"} name={"Energizer Forecast"} />
</div>
)}
{plantToken[0]?.value === "usday1" && (
<div className="flex flex-row gap-2">
<OrderImport fileType={"abbott"} name={"Abbott truck list"} />
<OrderImport fileType={"energizer"} name={"Energizer Truck List"} />
<ForecastImport fileType={"energizer"} name={"Energizer Forecast"} />
</div>
)}
{plantToken[0]?.value === "usflo1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"loreal"} name={"VMI Import"} />
</div>
)}
{plantToken[0]?.value === "usstp1" && (
<div className="flex flex-row gap-2"></div>
)}
{plantToken[0]?.value === "usiow1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
{plantToken[0]?.value === "usiow2" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
{plantToken[0]?.value === "usksc1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
</div>
);
const { settings } = useSettingStore();
const testServers = ["test1", "test2", "test3"];
const plantToken = settings.filter((n) => n.name === "plantToken");
//console.log(plantToken);
return (
<div className="flex flex-row-reverse gap-1">
<OrderImport fileType={"macro"} name={"Macro Import"} />
{/* dev and testserver sees all */}
{testServers.includes(plantToken[0]?.value) && (
<div className="flex flex-row gap-2">
<OrderImport
fileType={"abbott"}
name={"Abbott truck list"}
/>
<OrderImport
fileType={"energizer"}
name={"Energizer Truck List"}
/>
<ForecastImport fileType={"loreal"} name={"VMI Import"} />
<ForecastImport fileType={"pg"} name={"P&G"} />
<ForecastImport
fileType={"energizer"}
name={"Energizer Forecast"}
/>
</div>
)}
{plantToken[0]?.value === "usday1" && (
<div className="flex flex-row gap-2">
<OrderImport
fileType={"abbott"}
name={"Abbott truck list"}
/>
<OrderImport
fileType={"energizer"}
name={"Energizer Truck List"}
/>
<ForecastImport
fileType={"energizer"}
name={"Energizer Forecast"}
/>
</div>
)}
{plantToken[0]?.value === "usflo1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"loreal"} name={"VMI Import"} />
</div>
)}
{plantToken[0]?.value === "usstp1" && (
<div className="flex flex-row gap-2"></div>
)}
{plantToken[0]?.value === "usiow1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
{plantToken[0]?.value === "usiow2" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
{plantToken[0]?.value === "usksc1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
</div>
);
}

View File

@@ -3,70 +3,80 @@ import { useRef, useState } from "react";
import { toast } from "sonner";
import { Button } from "@/components/ui/button";
import { useAuth } from "@/lib/authClient";
import { useNavigate, useRouterState } from "@tanstack/react-router";
export default function ForecastImport(props: any) {
const fileInputRef: any = useRef(null);
const [posting, setPosting] = useState(false);
//const token = localStorage.getItem("auth_token");
const { session } = useAuth();
//const [fileType, setFileType] = useState("");
const importOrders = async (e: any) => {
const file = e.target.files[0];
if (!file) {
toast.error("Missing file please try again");
setPosting(false);
return;
}
const fileInputRef: any = useRef(null);
const [posting, setPosting] = useState(false);
//const token = localStorage.getItem("auth_token");
const { session } = useAuth();
//const [fileType, setFileType] = useState("");
const navigate = useNavigate();
const router = useRouterState();
const currentPath = router.location.href;
// create the form data with the correct fileType
const formData = new FormData();
formData.append("postForecast", e.target.files[0]);
formData.append("fileType", props.fileType); // extra field
formData.append("username", `${session?.user.username}`);
const importOrders = async (e: any) => {
if (!session || !session.user) {
toast.error("You are allowed to do this unless you are logged in");
navigate({ to: "/login", search: { redirect: currentPath } });
return;
}
const file = e.target.files[0];
if (!file) {
toast.error("Missing file please try again");
setPosting(false);
return;
}
// console.log(formData);
toast.success("Import started.");
try {
const response = await axios.post(
"/lst/old/api/logistics/postforecastin",
formData,
{
headers: {
"Content-Type": "multipart/form-data",
},
},
);
//console.log("Upload successful:", response.data);
toast.success(response?.data?.message);
fileInputRef.current.value = null;
setPosting(false);
// toast.success(
// "File Uploaded, please validate processing in alplaprod 2.0"
// );
setPosting(false);
} catch (error) {
console.log(error);
toast.error("Upload failed");
}
setPosting(false);
};
// create the form data with the correct fileType
const formData = new FormData();
formData.append("postForecast", e.target.files[0]);
formData.append("fileType", props.fileType); // extra field
formData.append("username", `${session?.user.username}`);
const handleButtonClick = () => {
setPosting(true);
fileInputRef.current.click();
};
return (
<div>
<Button onClick={handleButtonClick} disabled={posting}>
{props.name}
</Button>
<input
type="file"
accept=".xlsx, .xls, .xlsm"
ref={fileInputRef}
style={{ display: "none" }}
onChange={importOrders}
/>
</div>
);
// console.log(formData);
toast.success("Import started.");
try {
const response = await axios.post(
"/lst/old/api/logistics/postforecastin",
formData,
{
headers: {
"Content-Type": "multipart/form-data",
},
}
);
//console.log("Upload successful:", response.data);
toast.success(response?.data?.message);
fileInputRef.current.value = null;
setPosting(false);
// toast.success(
// "File Uploaded, please validate processing in alplaprod 2.0"
// );
setPosting(false);
} catch (error) {
console.log(error);
toast.error("Upload failed");
}
setPosting(false);
};
const handleButtonClick = () => {
setPosting(true);
fileInputRef.current.click();
};
return (
<div>
<Button onClick={handleButtonClick} disabled={posting}>
{props.name}
</Button>
<input
type="file"
accept=".xlsx, .xls, .xlsm"
ref={fileInputRef}
style={{ display: "none" }}
onChange={importOrders}
/>
</div>
);
}

View File

@@ -3,63 +3,72 @@ import { useRef, useState } from "react";
import { toast } from "sonner";
import { Button } from "@/components/ui/button";
import { useAuth } from "@/lib/authClient";
import { useNavigate, useRouterState } from "@tanstack/react-router";
export default function OrderImport(props: any) {
const fileInputRef: any = useRef(null);
const [posting, setPosting] = useState(false);
const { session } = useAuth();
//const [fileType, setFileType] = useState("");
const importOrders = async (e: any) => {
const file = e.target.files[0];
if (!file) {
toast.error("Missing file please try again");
setPosting(false);
return;
}
const fileInputRef: any = useRef(null);
const [posting, setPosting] = useState(false);
const { session } = useAuth();
const navigate = useNavigate();
const router = useRouterState();
const currentPath = router.location.href;
//const [fileType, setFileType] = useState("");
const importOrders = async (e: any) => {
if (!session || !session.user) {
toast.error("You are allowed to do this unless you are logged in");
navigate({ to: "/login", search: { redirect: currentPath } });
return;
}
const file = e.target.files[0];
if (!file) {
toast.error("Missing file please try again");
setPosting(false);
return;
}
// create the form data with the correct fileType
const formData = new FormData();
formData.append("postOrders", e.target.files[0]);
formData.append("fileType", props.fileType); // extra field
formData.append("username", `${session?.user.username}`);
// create the form data with the correct fileType
const formData = new FormData();
formData.append("postOrders", e.target.files[0]);
formData.append("fileType", props.fileType); // extra field
formData.append("username", `${session?.user.username}`);
try {
const response = await axios.post(
"/lst/old/api/logistics/postbulkorders",
formData,
{
headers: {
"Content-Type": "multipart/form-data",
},
},
);
//console.log("Upload successful:", response.data);
toast.success(response?.data?.message);
fileInputRef.current.value = null;
setPosting(false);
} catch (error) {
console.log(error);
toast.error("Upload failed");
}
setPosting(false);
};
try {
const response = await axios.post(
"/lst/old/api/logistics/postbulkorders",
formData,
{
headers: {
"Content-Type": "multipart/form-data",
},
}
);
//console.log("Upload successful:", response.data);
toast.success(response?.data?.message);
fileInputRef.current.value = null;
setPosting(false);
} catch (error) {
console.log(error);
toast.error("Upload failed");
}
setPosting(false);
};
const handleButtonClick = () => {
setPosting(true);
fileInputRef.current.click();
};
return (
<div>
<Button onClick={handleButtonClick} disabled={posting}>
{props.name}
</Button>
<input
type="file"
accept=".xlsx, .xls, .xlsm"
ref={fileInputRef}
style={{ display: "none" }}
onChange={importOrders}
/>
</div>
);
const handleButtonClick = () => {
setPosting(true);
fileInputRef.current.click();
};
return (
<div>
<Button onClick={handleButtonClick} disabled={posting}>
{props.name}
</Button>
<input
type="file"
accept=".xlsx, .xls, .xlsm"
ref={fileInputRef}
style={{ display: "none" }}
onChange={importOrders}
/>
</div>
);
}

View File

@@ -0,0 +1 @@
ALTER TABLE "qualityRequest" ADD COLUMN "qualityDurationToInspect" integer;

View File

@@ -0,0 +1 @@
ALTER TABLE "qualityRequest" ADD COLUMN "returnDurationToInspect" integer;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -519,6 +519,20 @@
"when": 1760708711258,
"tag": "0073_bumpy_dust",
"breakpoints": true
},
{
"idx": 74,
"version": "7",
"when": 1762966327361,
"tag": "0074_overconfident_may_parker",
"breakpoints": true
},
{
"idx": 75,
"version": "7",
"when": 1762983466464,
"tag": "0075_tan_unicorn",
"breakpoints": true
}
]
}

View File

@@ -25,6 +25,8 @@ export const qualityRequest = pgTable(
warehouseMovedTo: text("warehouseMovedTo"),
locationMovedTo: text("locationMovedTo"),
durationToMove: integer("durationToMove"),
qualityDurationToInspect: integer("qualityDurationToInspect"),
returnDurationToInspect: integer("returnDurationToInspect"),
locationDropOff: text("locationDropOff"),
palletStatus: integer("palletStatus"),
palletStatusText: text("palletStatusText"),

View File

@@ -0,0 +1,88 @@
import {
addDays,
format,
formatISO,
isBefore,
parseISO,
startOfWeek,
} from "date-fns";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { materialPerDay } from "../../../sqlServer/querys/dataMart/materialPerDay.js";
/**
 * Normalize an arbitrary value into a Date.
 * Date instances pass through untouched; strings are treated as ISO-ish
 * timestamps ("YYYY-MM-DD hh:mm:ss" gets its first space swapped for "T"
 * before parseISO); anything else goes to the Date constructor.
 */
function toDate(val: any) {
    if (val instanceof Date) {
        return val;
    }
    return typeof val === "string"
        ? parseISO(val.replace(" ", "T"))
        : new Date(val);
}
/**
 * Aggregate per-day demand rows into per-material weekly totals.
 *
 * Rows are grouped by MaterialHumanReadableId and by the Monday that starts
 * the week of CalDate (weekStartsOn: 1); DailyMaterialDemand values are
 * summed per group (non-numeric values count as 0).
 *
 * @param data rows with MaterialHumanReadableId, CalDate, DailyMaterialDemand
 * @returns flat array of { MaterialHumanReadableId, WeekStart (yyyy-MM-dd),
 *          WeeklyDemand (string, 2 decimals) }
 */
export function sumByMaterialAndWeek(data: any) {
    const grouped: any = {};
    for (const row of data) {
        const material = String(row.MaterialHumanReadableId);
        const weekStart = formatISO(
            startOfWeek(toDate(row.CalDate), { weekStartsOn: 1 }),
            { representation: "date" },
        );
        grouped[material] ??= {};
        grouped[material][weekStart] ??= 0;
        grouped[material][weekStart] += Number(row.DailyMaterialDemand) || 0;
    }
    const result = [];
    for (const [material, weeks] of Object.entries(grouped)) {
        for (const [weekStart, total] of Object.entries(weeks as any)) {
            result.push({
                MaterialHumanReadableId: material,
                WeekStart: weekStart,
                WeeklyDemand: Number(total).toFixed(2),
            });
        }
    }
    return result;
}
/**
 * Fetch per-day material demand from the data mart for the next 90 days and
 * aggregate it into per-material weekly totals.
 *
 * @returns {{success: boolean, message: string, data?: any[], error?: any}}
 *          On query failure: success=false with the error; on a soft failure
 *          reported by the data mart: success=false with an empty data array.
 */
export default async function materialPerDayCheck() {
    // Query window: today through today + 90 days, as date-only strings.
    const startDate = format(new Date(), "yyyy-MM-dd");
    const endDate = format(addDays(new Date(), 90), "yyyy-MM-dd");
    // Fill the SQL template placeholders. replaceAll (not replace) so every
    // occurrence of a placeholder is substituted — consistent with the
    // replaceAll usage in the quality controller's qrequestQuery handling.
    const { data, error } = (await tryCatch(
        query(
            materialPerDay
                .replaceAll("[startDate]", startDate)
                .replaceAll("[endDate]", endDate),
            "material check",
        ),
    )) as any;
    if (error) {
        return {
            success: false,
            message: "Error getting the material data",
            error,
        };
    }
    if (!data.success) {
        return {
            success: false,
            message: data.message,
            data: [],
        };
    }
    return {
        success: true,
        message: "material data",
        data: sumByMaterialAndWeek(data.data),
    };
}

View File

@@ -1,60 +1,60 @@
import { OpenAPIHono } from "@hono/zod-openapi";
import sendemail from "./routes/sendMail.js";
import { tryCatch } from "../../globalUtils/tryCatch.js";
import { db } from "../../../database/dbclient.js";
import { notifications } from "../../../database/schema/notifications.js";
import { tryCatch } from "../../globalUtils/tryCatch.js";
import { createLog } from "../logger/logger.js";
import fifoIndex from "./routes/fifoIndex.js";
import notifyStats from "./routes/getActiveNotifications.js";
import notify from "./routes/getNotifications.js";
import tiTrigger from "./routes/manualTiggerTi.js";
import materialCheck from "./routes/materialPerDay.js";
import blocking from "./routes/qualityBlocking.js";
import sendemail from "./routes/sendMail.js";
import { note, notificationCreate } from "./utils/masterNotifications.js";
import { startNotificationMonitor } from "./utils/processNotifications.js";
import notifyStats from "./routes/getActiveNotifications.js";
import tiTrigger from "./routes/manualTiggerTi.js";
import blocking from "./routes/qualityBlocking.js";
import notify from "./routes/getNotifications.js";
import fifoIndex from "./routes/fifoIndex.js";
const app = new OpenAPIHono();
const routes = [
sendemail,
notifyStats,
tiTrigger,
blocking,
notify,
fifoIndex,
sendemail,
notifyStats,
tiTrigger,
blocking,
notify,
fifoIndex,
materialCheck,
] as const;
const appRoutes = routes.forEach((route) => {
app.route("/notify", route);
app.route("/notify", route);
});
app.all("/notify/*", (c) => {
return c.json({
success: false,
message: "you have encounted a notication route that dose not exist.",
});
return c.json({
success: false,
message: "you have encounted a notication route that dose not exist.",
});
});
// check if the mastNotications is changed compared to the db and add if needed.
const { data: notes, error: notesError } = await tryCatch(
db.select().from(notifications)
db.select().from(notifications),
);
if (notesError) {
createLog(
"error",
"notify",
"notify",
`There was an error getting the notifications: ${JSON.stringify(
notesError
)}`
);
createLog(
"error",
"notify",
"notify",
`There was an error getting the notifications: ${JSON.stringify(
notesError,
)}`,
);
}
setTimeout(() => {
notificationCreate();
startNotificationMonitor();
notificationCreate();
startNotificationMonitor();
}, 5 * 1000);
export default app;

View File

@@ -0,0 +1,37 @@
// an external way to creating logs
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import fifoIndexCheck from "../controller/notifications/fifoIndex.js";
import materialPerDayCheck from "../controller/notifications/materialPerDay.js";
// Sub-app exposing GET /materialperday; mounted under /notify by the
// notify module's route index.
const app = new OpenAPIHono({ strict: false });
app.openapi(
    createRoute({
        tags: ["notify"],
        summary: "",
        method: "get",
        path: "/materialperday",
        //middleware: authMiddleware,
        responses: responses(),
    }),
    async (c) => {
        // Record the API hit for usage statistics.
        apiHit(c, { endpoint: "/materialperday" });
        // Aggregate the data mart's daily material demand into weekly
        // totals and return it as-is (success/message/data envelope).
        const checkedData = await materialPerDayCheck();
        return c.json({
            success: checkedData.success,
            message: checkedData.message,
            data: checkedData.data,
        });
    },
);
export default app;

View File

@@ -1,171 +1,180 @@
import { differenceInMinutes } from "date-fns";
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { qualityRequest } from "../../../../database/schema/qualityRequest.js";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { qrequestQuery } from "../../sqlServer/querys/quality/request.js";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
export const addNewPallet = async (data: any, user: string) => {
/**
* Post new pallets
*/
export const addNewPallet = async (data: any) => {
/**
* Post new pallets
*/
if (parseInt(data.runningNr) <= 0) {
return {
sucess: false,
message: "Please add a valid running number.",
};
}
const updateQuery = qrequestQuery.replaceAll(
"[runningNumber]",
data.runningNr
);
const { data: c, error: ce } = await tryCatch(
db
.select()
.from(qualityRequest)
.where(eq(qualityRequest.runningNr, data.runningNr))
);
if (ce) {
return {
success: false,
message: "There was an error getting the quality request",
data: ce,
};
}
if (parseInt(data.runningNr) <= 0) {
return {
success: false,
message: "Please add a valid running number.",
};
}
const updateQuery = qrequestQuery.replaceAll(
"[runningNumber]",
data.runningNr,
);
const { data: c, error: ce } = await tryCatch(
db
.select()
.from(qualityRequest)
.where(eq(qualityRequest.runningNr, data.runningNr)),
);
if (ce) {
return {
success: false,
message: "There was an error getting the quality request",
data: ce,
};
}
const palletData: any = c;
// if the pallet exist then tell the user to check on it
if (
(palletData && palletData[0]?.palletStatus === 1) ||
palletData[0]?.palletStatus === 4
) {
return {
success: false,
message: `Running number ${data.runningNr} is already pending or reactivated please follow up with the warehouse team on status to be moved.`,
};
}
const palletData: any = c;
// if the pallet exist then tell the user to check on it
const pStatus = [1, 4, 6];
if (palletData && pStatus.includes(palletData[0]?.palletStatus)) {
return {
success: false,
message: `Running number ${data.runningNr} is already pending or reactivated please follow up with the warehouse team on status to be moved.`,
};
}
if (palletData.length > 0) {
try {
// get the pallet info from stock
const { data: pa, error: pe } = await tryCatch(
query(updateQuery, "quality request")
);
const p: any = pa;
if (pe) {
return {
success: false,
message: "There was an error getting the pallet from stock",
data: pe,
};
}
const pData = {
warehouseAtRequest: p[0].warehouseAtRequest,
locationAtRequest: p[0].locationAtRequest,
warehouseMovedTo: null,
locationMovedTo: null,
palletStatus: 4,
durationToMove: 0,
palletStatusText: "reactivated",
palletRequest: palletData[0].palletStatus + 1,
upd_user: user,
upd_date: new Date(timeZoneFix()),
};
// update the existing pallet if already in the system
if (palletData.length > 0) {
try {
// get the pallet info from stock
const { data: pa, error: pe } = await tryCatch(
query(updateQuery, "quality request"),
);
const p: any = pa ? pa.data : [];
if (pe) {
return {
success: false,
message: "There was an error getting the pallet from stock",
data: pe,
};
}
const pData = {
warehouseAtRequest: p[0].warehouseAtRequest,
locationAtRequest: p[0].locationAtRequest,
warehouseMovedTo: null,
locationMovedTo: null,
palletStatus: data.palletStatusText === "return" ? 6 : 4,
//durationToMove: 0,
palletStatusText:
data.palletStatusText === "return" ? "return" : "reactivated",
qualityDurationToInspect:
data.palletStatusText === "return"
? differenceInMinutes(new Date(Date.now()), p[0].lastMove)
: 0,
locationDropOff:
data.palletStatusText === "return" ? "Return to warhouse" : "",
palletRequest: palletData[0].palletStatus + 1,
upd_user: data.user,
upd_date: sql`NOW()`,
};
const { data: u, error } = await tryCatch(
db
.update(qualityRequest)
.set(pData)
.where(eq(qualityRequest.runningNr, data.runningNr))
);
const { data: u, error } = await tryCatch(
db
.update(qualityRequest)
.set(pData)
.where(eq(qualityRequest.runningNr, data.runningNr)),
);
if (error) {
return {
success: false,
message: `Running number: ${data.runningNr} encountered and error reactivated.`,
data: error,
};
}
if (error) {
return {
success: false,
message: `Running number: ${data.runningNr} encountered and error reactivated.`,
data: error,
};
}
if (data) {
return {
success: true,
message: `Running number: ${data.runningNr} was just reactivated.`,
data: u,
};
}
} catch (error) {
return {
success: false,
message:
"There was an error updating the pallet in quality request",
data: error,
};
}
}
if (data) {
return {
success: true,
message: `Running number: ${data.runningNr} was just reactivated.`,
data: u,
};
}
} catch (error) {
console.log(error);
return {
success: false,
message: "There was an error updating the pallet in quality request",
data: error,
};
}
}
// add new pallet
try {
const { data: px, error: pe } = await tryCatch(
query(updateQuery, "quality request")
);
const p: any = px;
if (p.length === 0) {
return {
success: false,
message: `Running Number ${data.runningNr} dose not exist in stock.`,
};
}
// add new pallet
try {
const { data: px, error: pe } = await tryCatch(
query(updateQuery, "quality request"),
);
if (pe) {
return {
success: false,
message: "There was an error getting the pallet from stock",
data: pe,
};
}
const p: any = px ? px.data : [];
if (p.length === 0) {
return {
success: false,
message: `Running Number ${data.runningNr} dose not exist in stock, please check the running number and try again.`,
};
}
const nData = {
article: p[0].article,
description: p[0].description,
runningNr: p[0].runningNr,
lotNr: p[0].lotNr,
warehouseAtRequest: p[0].warehouseAtRequest,
locationAtRequest: p[0].locationAtRequest,
locationDropOff: data.moveTo,
palletStatus: 1,
palletStatusText: "pending",
palletRequest: 1,
add_user: user,
upd_user: user,
};
if (pe) {
return {
success: false,
message: "There was an error getting the pallet from stock",
data: pe,
};
}
console.log(p);
const nData = {
article: p[0].article,
description: p[0].description,
runningNr: p[0].runningNr,
lotNr: p[0].lotNr,
warehouseAtRequest: p[0].warehouseAtRequest,
locationAtRequest: p[0].locationAtRequest,
locationDropOff: data.moveTo,
palletStatus: 1,
palletStatusText: "pending",
palletRequest: 1,
add_user: data.user,
upd_user: data.user,
};
const { data: u, error } = await tryCatch(
db.insert(qualityRequest).values(nData)
);
const { data: u, error } = await tryCatch(
db.insert(qualityRequest).values(nData).onConflictDoNothing(),
);
if (error) {
return {
success: false,
message: `Running number: ${data.runningNr} encountered and error reactivated.`,
data: error,
};
}
if (error) {
return {
success: false,
message: `Running number: ${data.runningNr} encountered and error reactivated.`,
data: error,
};
}
if (data) {
return {
success: true,
message: `Running number: ${data.runningNr} was just added.`,
data: u,
};
}
} catch (error) {
return {
success: false,
message: "There was an error adding the pallet in quality request",
data: error,
};
}
if (data) {
return {
success: true,
message: `Running number: ${data.runningNr} was just added.`,
data: u,
};
}
} catch (error) {
console.log(error);
return {
success: false,
message: "There was an error adding the pallet in quality request",
data: error,
};
}
};

View File

@@ -1,110 +1,127 @@
import { eq, inArray } from "drizzle-orm";
import { differenceInMinutes } from "date-fns";
import { eq, inArray, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { qualityRequest } from "../../../../database/schema/qualityRequest.js";
import { delay } from "../../../globalUtils/delay.js";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { qrequestQuery } from "../../sqlServer/querys/quality/request.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { differenceInMinutes } from "date-fns";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
import { qrequestQuery } from "../../sqlServer/querys/quality/request.js";
export const qualityCycle = async () => {
/**
* Cycles the pallets in the quality request to see whats been moved or changed.
*/
/**
* Cycles the pallets in the quality request to see whats been moved or changed.
*/
const warehouse = [1, 4, 5];
// pallet request check interval 5min check to start
//setInterval(async () => {
// create the date stuff
const currentTime = new Date(Date.now());
// pallet request check interval 5min check to start
setInterval(
async () => {
// create the date stuff
const currentTime = new Date(Date.now());
// pull in all current pallets from our db
const { data, error } = await tryCatch(
db
.select()
.from(qualityRequest)
.where(inArray(qualityRequest.palletStatus, [1, 4, 5]))
);
// pull in all current pallets from our db
const { data, error } = await tryCatch(
db
.select()
.from(qualityRequest)
.where(inArray(qualityRequest.palletStatus, [1, 4, 5, 6])),
);
if (error) {
createLog(
"error",
"lst",
"quality",
`There was an error getting quality request data: ${error}`
);
return {
success: false,
message: "There was an error getting quality request data",
};
}
const lstQData: any = data;
// get the pallets that currentStat is moved
// const res = await runQuery(palletMoveCheck, "palletCheck");
if (error) {
createLog(
"error",
"lst",
"quality",
`There was an error getting quality request data: ${error}`,
);
return {
success: false,
message: "There was an error getting quality request data",
};
}
const lstQData: any = data;
// get the pallets that currentStat is moved
// const res = await runQuery(palletMoveCheck, "palletCheck");
if (lstQData.length != 0) {
for (let i = 0; i < lstQData.length; i++) {
// run the pallet query we will compare the data.
// console.log(lstQData[i]);
//update query with plant token
if (lstQData.length != 0) {
for (let i = 0; i < lstQData.length; i++) {
// run the pallet query we will compare the data.
// console.log(lstQData[i]);
//update query with plant token
// change the update the pallet number
const qPalletNumber = qrequestQuery.replaceAll(
"[runningNumber]",
lstQData[i].runningNr
);
// update the the pallet number
const qPalletNumber = qrequestQuery.replaceAll(
"[runningNumber]",
lstQData[i].runningNr,
);
const queryData: any = await query(
qPalletNumber,
"Quality update check"
);
let prodData: any =
queryData?.data.length === 0 ? [] : queryData.data;
const queryData: any = await query(
qPalletNumber,
"Quality update check",
);
let prodData: any =
queryData?.data.length === 0 ? [] : queryData.data;
if (
lstQData[i]?.locationAtRequest != prodData[0]?.locationAtRequest
) {
// time to do the pallet update stuff
const qDataPost = {
warehouseMovedTo: prodData[0]?.warehouseAtRequest,
locationMovedTo: prodData[0]?.locationAtRequest,
durationToMove: differenceInMinutes(
timeZoneFix(),
lstQData[i].upd_date
),
palletStatus: 2,
palletStatusText: "moved",
upd_date: new Date(timeZoneFix()),
upd_user: "LST_System",
};
if (
lstQData[i]?.locationAtRequest != prodData[0]?.locationAtRequest
) {
// time to do the pallet update stuff
const qDataPost = {
warehouseMovedTo: prodData[0]?.warehouseAtRequest,
locationMovedTo: prodData[0]?.locationAtRequest,
// how ling did it take the warhouse to originally move the pallet
durationToMove: warehouse.includes(lstQData[i].palletStatus)
? differenceInMinutes(
new Date(Date.now()),
lstQData[i].upd_date,
)
: lstQData[i].durationToMove,
// how long did it take warehouse to move the pallet back agian
returnDurationToInspect:
lstQData[i].palletStatus === 7
? differenceInMinutes(
new Date(Date.now()),
lstQData[i].upd_date,
)
: lstQData[i].qualityDurationToInspect,
palletStatus: 2,
palletStatusText: "moved",
upd_date: sql`NOW()`,
upd_user: "LST_System",
};
const updatePallet = await db
.update(qualityRequest)
.set(qDataPost)
.where(eq(qualityRequest.runningNr, lstQData[i].runningNr));
const updatePallet = await db
.update(qualityRequest)
.set(qDataPost)
.where(eq(qualityRequest.runningNr, lstQData[i].runningNr));
createLog(
"info",
"lst",
"quality",
`Pallet ${lstQData[i].runningNr} was updated`
);
} else {
createLog(
"debug",
"lst",
"quality",
`Pallet ${
lstQData[i].runningNr
} has not been moved yet it has been pending for ${differenceInMinutes(
timeZoneFix(),
lstQData[i].upd_date
)} min(s)`
);
}
}
} else {
createLog("debug", "lst", "quality", "nothing to update");
}
//}, 5 * 60 * 1000); // every 5 min
createLog(
"info",
"lst",
"quality",
`Pallet ${lstQData[i].runningNr} was updated`,
);
} else {
createLog(
"info",
"lst",
"quality",
`Pallet ${
lstQData[i].runningNr
} has not been moved yet it has been pending for ${differenceInMinutes(
new Date(Date.now()),
lstQData[i].upd_date,
)} min(s)`,
);
}
}
await delay(150);
} else {
createLog("info", "lst", "quality", "nothing to update");
}
},
5 * 60 * 1000,
); // every 5 min
};

View File

@@ -5,35 +5,37 @@ import postReq from "./route/postNewRequest.js";
// pallet status data.
export const statusOptions = [
{ name: "pending", uid: "1" },
{ name: "moved", uid: "2" },
{ name: "removed", uid: "3" },
{ name: "reactivated", uid: "4" },
{ name: "canceled", uid: "5" },
{ name: "pending", uid: "1" },
{ name: "moved", uid: "2" },
{ name: "removed", uid: "3" },
{ name: "reactivated", uid: "4" },
{ name: "canceled", uid: "5" },
{ name: "return", uid: "6" },
{ name: "readyToReturn", uid: "7" },
];
const app = new OpenAPIHono();
const routes = [request, postReq] as const;
const appRoutes = routes.forEach((route) => {
app.route("/quality", route);
app.route("/quality", route);
});
app.all("/quality/*", (c) => {
return c.json({
success: false,
message: "You have encounters a quality route that dose not exist.",
});
return c.json({
success: false,
message: "You have encounters a quality route that dose not exist.",
});
});
/**
* Initial and run the cycle up for checking the pallet moves for quality
*/
setTimeout(() => {
qualityCycle();
qualityCycle();
}, 1000 * 5);
setInterval(() => {
qualityCycle();
qualityCycle();
}, 1000 * 60);
export default app;

View File

@@ -1,74 +1,72 @@
// an external way to creating logs
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { verify } from "hono/jwt";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { getRequest } from "../controller/getRequests.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { addNewPallet } from "../controller/addNewPallet.js";
import { verify } from "hono/jwt";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { getRequest } from "../controller/getRequests.js";
const app = new OpenAPIHono({ strict: false });
const Body = z.object({
runningNr: z.number().openapi({ example: 1528 }),
moveTo: z.string().openapi({ example: "rework" }),
runningNr: z.number().openapi({ example: 1528 }),
moveTo: z.string().optional().openapi({ example: "rework" }),
});
app.openapi(
createRoute({
tags: ["quality"],
summary: "Returns all pallets requested",
method: "post",
path: "/newrequest",
middleware: authMiddleware,
request: {
body: {
content: {
"application/json": { schema: Body },
},
},
},
responses: responses(),
}),
async (c) => {
const authHeader = c.req.header("Authorization");
const token = authHeader?.split("Bearer ")[1] || "";
createRoute({
tags: ["quality"],
summary: "Returns all pallets requested",
method: "post",
path: "/newrequest",
middleware: authMiddleware,
request: {
body: {
content: {
"application/json": { schema: Body },
},
},
},
responses: responses(),
}),
async (c) => {
// const authHeader = c.req.header("Authorization");
// const token = authHeader?.split("Bearer ")[1] || "";
const payload = await verify(token, process.env.JWT_SECRET!);
const user: any = payload.user;
// const payload = await verify(token, process.env.JWT_SECRET!);
// const user: any = payload.user;
const { data: b, error: e } = await tryCatch(c.req.json());
apiHit(c, { endpoint: "/newrequest", lastBody: b });
if (e) {
return c.json({
success: false,
message: "Missing Data",
});
}
const body: any = b;
// console.log(body);
// if (!body.runningNr) {
// return c.json({
// success: false,
// message: "Missing mandatory data.",
// });
// }
const { data: b, error: e } = await tryCatch(c.req.json());
apiHit(c, { endpoint: "/newrequest", lastBody: b });
if (e) {
return c.json({
success: false,
message: "Missing Data",
});
}
const body: any = b;
// console.log(body);
// if (!body.runningNr) {
// return c.json({
// success: false,
// message: "Missing mandatory data.",
// });
// }
const { data, error } = await tryCatch(
addNewPallet(body, user?.username)
);
const { data, error } = await tryCatch(addNewPallet(body));
if (error) {
return c.json({
success: false,
message: "There was an error adding the new pallet",
});
}
if (error) {
return c.json({
success: false,
message: "There was an error adding the new pallet",
});
}
return c.json({
success: data?.success,
message: data?.message,
data: data?.data,
});
}
return c.json({
success: data?.success,
message: data?.message,
data: data?.data,
});
},
);
export default app;

View File

@@ -0,0 +1,115 @@
/**
 * T-SQL query template: material demand per shift-day.
 *
 * Splits every production lot that overlaps the requested window into
 * shift-aligned 24h buckets (@ShiftStartHour = 06:00 -> next day 06:00),
 * then distributes each lot/material's TotalDemand across those buckets
 * proportionally to the seconds of overlap (NormalizedFraction).
 *
 * Placeholders — the caller must substitute BOTH before executing:
 *   [startDate] — inclusive first calendar date (parsed as `date`)
 *   [endDate]   — inclusive last calendar date of the recursive Calendar CTE
 *
 * NOTE(review): placeholders are spliced in via string replacement, not
 * bound parameters — callers must never pass untrusted input here (SQL
 * injection). TODO: confirm and move to parameterized execution.
 * NOTE(review): `use [test3_AlplaPROD2.0_Read]` pins a *test* database —
 * verify this is intentional before shipping to production.
 *
 * Fix: the previous epsilon `DATEADD(SECOND, -0.0000001, ...)` was a no-op
 * (T-SQL DATEADD truncates its number argument to int, so it added 0
 * seconds), which put DayEnd exactly ON the next day's shift start.
 * Replaced with `DATEADD(NANOSECOND, -100, ...)` — one DATETIME2(7) tick —
 * so each day window ends just before the next one begins.
 */
export const materialPerDay = `
use [test3_AlplaPROD2.0_Read]
DECLARE @ShiftStartHour INT = 6
declare @startDate nvarchar(max) = '[startDate]'
declare @endDate nvarchar(max) = '[endDate]'
;with Calendar as (
select cast(@startDate as date) CalDate
union all
select dateadd(day,1,CalDate)
from Calendar
where CalDate < @endDate
),
DailySplit AS (
    SELECT
        -- Lot fields
        l.Id AS ProductionLotId,
        l.ProductionLotHumanReadableId,
        l.ArticleHumanReadableId,
        l.ArticleDescription,
        l.LocationId,
        l.MachineHumanReadableId,
        l.MachineDescription,
        l.StartDate,
        l.FinishDate,
        l.ProductionCustomerDescription,
        l.ProductionCustomerHumanReadableId,
        l.PlannedQuantity,
        l.PlannedLoadingUnit,
        l.Cavity,
        l.Utilisation,
        l.TotalMaterialDemand AS LotTotalMaterialDemand,
        -- Material fields
        m.MaterialHumanReadableId,
        m.MaterialDescription,
        m.TotalDemand AS LotMaterialTotalDemand,
        c.CalDate,
        DATEDIFF(SECOND,l.StartDate,l.FinishDate) AS LotDurationSec,
        -- build shift-based 24-hour window (e.g. 06:00 -> next day 06:00)
        CASE
            WHEN l.StartDate > DATEADD(HOUR,@ShiftStartHour,CAST(c.CalDate AS DATETIME2(7)))
                THEN l.StartDate
            ELSE DATEADD(HOUR,@ShiftStartHour,CAST(c.CalDate AS DATETIME2(7)))
        END AS DayStart,
        -- end one DATETIME2(7) tick (100ns) before the next shift start so
        -- consecutive day windows never share the boundary instant
        CASE
            WHEN l.FinishDate < DATEADD(NANOSECOND,-100,
                    DATEADD(HOUR,@ShiftStartHour,
                    DATEADD(DAY,1,CAST(c.CalDate AS DATETIME2(7)))))
                THEN l.FinishDate
            ELSE DATEADD(NANOSECOND,-100,
                    DATEADD(HOUR,@ShiftStartHour,
                    DATEADD(DAY,1,CAST(c.CalDate AS DATETIME2(7)))))
        END AS DayEnd
    FROM [issueMaterial].[ProductionLot] (nolock) AS l
    LEFT JOIN [issueMaterial].[MaterialDemand] (nolock) AS m
        ON m.ProductionLotId = l.Id
    CROSS JOIN Calendar AS c
    WHERE DATEADD(HOUR,@ShiftStartHour,CAST(c.CalDate AS DATETIME2))
            < l.FinishDate
        AND DATEADD(HOUR,@ShiftStartHour+24,CAST(c.CalDate AS DATETIME2))
            > l.StartDate
    --and l.[ProductionLotHumanReadableId] = 26364
),
Fraction AS (
    SELECT
        ds.*,
        DATEDIFF(SECOND,ds.DayStart,ds.DayEnd) AS OverlapSec
    FROM DailySplit ds
),
Normalized AS (
    SELECT
        f.*,
        f.OverlapSec * 1.0 /
            NULLIF(SUM(f.OverlapSec) OVER
                (PARTITION BY f.ProductionLotId, f.MaterialHumanReadableId),0)
            AS NormalizedFraction
    FROM Fraction f
)
SELECT
    n.ProductionLotId,
    n.ProductionLotHumanReadableId,
    n.ArticleHumanReadableId,
    n.ArticleDescription,
    n.LocationId,
    n.MachineHumanReadableId,
    n.MachineDescription,
    n.StartDate,
    n.FinishDate,
    n.ProductionCustomerDescription,
    n.ProductionCustomerHumanReadableId,
    n.PlannedQuantity,
    n.PlannedLoadingUnit,
    n.Cavity,
    n.Utilisation,
    n.LotTotalMaterialDemand,
    n.MaterialHumanReadableId,
    n.MaterialDescription,
    n.LotMaterialTotalDemand,
    n.CalDate,
    n.NormalizedFraction * n.LotMaterialTotalDemand AS DailyMaterialDemand
FROM Normalized n
ORDER BY
    n.MaterialHumanReadableId,
    n.CalDate,
    n.ProductionLotHumanReadableId
OPTION (MAXRECURSION 0);
`;