Compare commits

...

5 Commits

32 changed files with 5754 additions and 620 deletions

2
.gitignore vendored
View File

@@ -14,7 +14,7 @@ controllerBuilds
lstV2/frontend/.tanstack
mobileLst
keys
# Logs
logs
*.log

View File

@@ -1,5 +1,6 @@
dist
frontend/dist
mobileLst/dist
lstDocs/build
migrations
Dockerfile

14
.vscode/launch.json vendored
View File

@@ -1,14 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Attach to packager",
"cwd": "${workspaceFolder}",
"type": "reactnative",
"request": "attach"
}
]
}

View File

@@ -1,48 +0,0 @@
{
"editor.defaultFormatter": "esbenp.prettier-vscode",
"workbench.colorTheme": "Default Dark+",
"prettier.tabWidth": 4,
"terminal.integrated.env.windows": {},
"editor.formatOnSave": true,
"[javascript]": {
"editor.formatOnSave": true
},
"[javascriptreact]": {
"editor.formatOnSave": true
},
"[typescript]": {
"editor.formatOnSave": true
},
"[typescriptreact]": {
"editor.formatOnSave": true
},
"[json]": {
"editor.formatOnSave": true
},
"[graphql]": {
"editor.formatOnSave": true
},
"[handlebars]": {
"editor.formatOnSave": true
},
"[go]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "golang.go"
},
"[powershell]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "ms-vscode.powershell" // requires PowerShell extension
},
"[bat]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format" // supports .sh, .bat, .cmd
},
"[cmd]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format"
},
// Optional: Configure goimports instead of gofmt
"go.formatTool": "goimports",
"cSpell.words": ["alpla", "alplamart", "alplaprod", "ppoo"]
}

40
.vscode/settings.json vendored
View File

@@ -7,6 +7,46 @@
"source.fixAll.biome": "explicit",
"source.organizeImports.biome": "explicit"
},
"[javascript]": {
"editor.formatOnSave": true
},
"[javascriptreact]": {
"editor.formatOnSave": true
},
"[typescript]": {
"editor.formatOnSave": true
},
"[typescriptreact]": {
"editor.formatOnSave": true
},
"[json]": {
"editor.formatOnSave": true
},
"[graphql]": {
"editor.formatOnSave": true
},
"[handlebars]": {
"editor.formatOnSave": true
},
"[go]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "golang.go"
},
"[powershell]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "ms-vscode.powershell" // requires PowerShell extension
},
"[bat]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format" // supports .sh, .bat, .cmd
},
"[cmd]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format"
},
// Optional: Configure goimports instead of gofmt
"go.formatTool": "goimports",
"cSpell.words": [
"acitve",
"alpla",

View File

@@ -5,13 +5,13 @@ meta {
}
get {
url: {{url}}/lst/old/api/eom/histinv?month=2025-11-01
url: {{url}}/lst/old/api/eom/histinv?month=2025/11/1
body: none
auth: inherit
}
params:query {
month: 2025-11-01
month: 2025/11/1
}
settings {

View File

@@ -0,0 +1,16 @@
meta {
name: materialPerDay
type: http
seq: 2
}
get {
url: {{urlv2}}/api/notify/materialperday
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,25 @@
meta {
name: Add pallet
type: http
seq: 2
}
post {
url: {{url}}/lst/old/api/quality/newrequest
body: json
auth: inherit
}
body:json {
{
"username": "matthes01",
"runningNr": 618302,
"palletStatusText":"return" // returned will be the only allowed key
//"moveTo": "hold area" //hold area, rework, inspection
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,16 @@
meta {
name: Get Pallets
type: http
seq: 1
}
get {
url: {{url}}/lst/old/api/quality/getrequest
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: Quality
seq: 7
}
auth {
mode: inherit
}

View File

@@ -5,11 +5,16 @@ meta {
}
get {
url:
url: {{url}}/lst/api/user/me
body: none
auth: inherit
auth: bearer
}
auth:bearer {
token: jpHHbLNGJRpUMvfrVOYmhbJL2Ux0arse
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,20 @@
meta {
name: otacheck
type: http
seq: 3
}
get {
url: http://10.193.0.56:4000/api/mobile/updates
body: none
auth: inherit
}
headers {
expo-runtime-version: 1.0.0
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -3,6 +3,7 @@ process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
import { toNodeHandler } from "better-auth/node";
import cors from "cors";
import express from "express";
import fs from "fs";
import { createServer } from "http";
import { createProxyMiddleware, fixRequestBody } from "http-proxy-middleware";
import morgan from "morgan";
@@ -13,6 +14,7 @@ import swaggerUi from "swagger-ui-express";
import { fileURLToPath } from "url";
import { userMigrate } from "./src/internal/auth/controller/userMigrate.js";
import { schedulerManager } from "./src/internal/logistics/controller/schedulerManager.js";
import { setupMobileRoutes } from "./src/internal/mobile/route.js";
import { printers } from "./src/internal/ocp/printers/printers.js";
import { setupRoutes } from "./src/internal/routerHandler/routeHandler.js";
import { baseModules } from "./src/internal/system/controller/modules/baseModules.js";
@@ -156,12 +158,20 @@ const main = async () => {
},
methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
credentials: true,
exposedHeaders: ["set-cookie"],
exposedHeaders: [
"set-cookie",
"expo-protocol-version",
"expo-sfv-version",
],
allowedHeaders: [
"Content-Type",
"Authorization",
"X-Requested-With",
"XMLHttpRequest",
"expo-runtime-version",
"expo-platform",
"expo-channel-name",
"*",
],
}),
);
@@ -188,22 +198,6 @@ const main = async () => {
res.sendFile(join(__dirname, "../lstDocs/build/index.html"));
});
// app ota updates
app.use(
basePath + "/api/mobile/updates",
express.static(join(__dirname, "../mobileLst/dist"), {
setHeaders(res) {
// OTA runtime needs to fetch these from the device
console.log("OTA check called");
res.setHeader("Access-Control-Allow-Origin", "*");
},
}),
);
app.get(basePath + "/api/mobile", (_, res) =>
res.status(200).json({ message: "LST OTA server is up." }),
);
// server setup
const server = createServer(app);
@@ -223,7 +217,7 @@ const main = async () => {
// start up the v1listener
v1Listener();
addListeners();
userMigrate();
//userMigrate();
// some temp fixes
manualFixes();

View File

@@ -0,0 +1,211 @@
import type { Express, Request, Response } from "express";
import express, { Router } from "express";
import { readdirSync, readFileSync, statSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import crypto from "crypto";
import fs from "fs";
/**
 * Registers the Expo OTA (over-the-air) update endpoints for the mobile app:
 *  - GET  {basePath}/api/mobile/updates  → builds and returns an Expo update
 *    manifest (SDK 50+ format) for the Hermes bundle in mobileLst/dist.
 *  - static {basePath}/api/mobile/updates/* → serves the bundle and assets.
 *  - GET  {basePath}/api/mobile → health check.
 */
export const setupMobileRoutes = (app: Express, basePath: string) => {
    // NOTE(review): router is created but never mounted — appears unused.
    const router = Router();
    // Resolve the mobile build output directory relative to this module.
    const __filename = fileURLToPath(import.meta.url);
    const __dirname = dirname(__filename);
    const distPath = join(__dirname, "../../../../mobileLst/dist");

    // Build the "assets" section of the update manifest: one entry per file
    // under dist/assets, with its sha256 hash and an absolute download URL.
    function generateAssetManifest(baseUrl: string) {
        const assets: any[] = [];
        const assetsDir = join(distPath, "assets");
        try {
            if (!fs.existsSync(assetsDir)) {
                // No assets directory — empty manifest section is valid.
                return assets;
            }
            const files = readdirSync(assetsDir);
            files.forEach((file) => {
                const filePath = join(assetsDir, file);
                const stats = statSync(filePath);
                if (stats.isFile()) {
                    // Hash the full file so the OTA client can verify it.
                    const content = readFileSync(filePath);
                    const hash = crypto
                        .createHash("sha256")
                        .update(content)
                        .digest("hex");
                    assets.push({
                        hash: hash,
                        key: file,
                        fileExtension: `.${file.split(".").pop()}`,
                        contentType: getContentType(file),
                        url: `${baseUrl}/assets/${file}`,
                    });
                }
            });
        } catch (err) {
            // Best-effort: log and return whatever was collected so far.
            console.log("Error reading assets:", err);
        }
        return assets;
    }

    // Map a file name to a MIME type by extension; unknown extensions fall
    // back to application/octet-stream. .hbc/.bundle are Hermes/JS bundles.
    function getContentType(filename: string): string {
        const ext = filename.split(".").pop()?.toLowerCase();
        const contentTypes: { [key: string]: string } = {
            hbc: "application/javascript",
            bundle: "application/javascript",
            js: "application/javascript",
            json: "application/json",
            png: "image/png",
            jpg: "image/jpeg",
            jpeg: "image/jpeg",
            gif: "image/gif",
            ttf: "font/ttf",
            otf: "font/otf",
            woff: "font/woff",
            woff2: "font/woff2",
        };
        return contentTypes[ext || ""] || "application/octet-stream";
    }

    // Manifest endpoint: the Expo runtime on the device calls this to check
    // for an update matching its runtime version.
    app.get(basePath + "/api/mobile/updates", (req, res) => {
        console.log("=== OTA Update Request ===");
        console.log("Headers:", JSON.stringify(req.headers, null, 2));
        const runtimeVersion = req.headers["expo-runtime-version"];
        // NOTE(review): platform is read but never used — presumably for
        // future platform-specific bundles (only android is served below).
        const platform = req.headers["expo-platform"] || "android";
        const expectedRuntimeVersion = "1.0.0";
        // Only serve updates to clients built against the expected runtime.
        if (runtimeVersion !== expectedRuntimeVersion) {
            console.log(
                `Runtime mismatch: got ${runtimeVersion}, expected ${expectedRuntimeVersion}`
            );
            return res.status(404).json({
                error: "No update available for this runtime version",
                requestedVersion: runtimeVersion,
                availableVersion: expectedRuntimeVersion,
            });
        }
        try {
            // const host = req.get('host');
            // // If it's the production domain, force https
            // const protocol = host.includes('alpla.net') ? 'https' : req.protocol;
            // const baseUrl = `${protocol}://${host}/lst/api/mobile/updates`
            // NOTE(review): protocol is hardcoded to http and basePath is not
            // included in baseUrl — confirm this matches the deployed proxy.
            const host = req.get('host'); // Should be "usmcd1vms036:4000"
            const protocol = 'http';
            const baseUrl = `${protocol}://${host}/api/mobile/updates`;
            // Find the Hermes bytecode bundle (.hbc) for android.
            const bundleDir = join(distPath, "_expo/static/js/android");
            if (!fs.existsSync(bundleDir)) {
                console.error("Bundle directory does not exist:", bundleDir);
                return res
                    .status(500)
                    .json({ error: "Bundle directory not found" });
            }
            const bundleFiles = readdirSync(bundleDir);
            console.log("Available bundle files:", bundleFiles);
            const bundleFile = bundleFiles.find((f) => f.endsWith(".hbc"));
            if (!bundleFile) {
                console.error("No .hbc file found in:", bundleDir);
                return res
                    .status(500)
                    .json({ error: "Hermes bundle (.hbc) not found" });
            }
            console.log("Using bundle file:", bundleFile);
            // Hash the launch bundle for the manifest's launchAsset entry.
            const bundlePath = join(bundleDir, bundleFile);
            const bundleContent = readFileSync(bundlePath);
            const bundleHash = crypto
                .createHash("sha256")
                .update(bundleContent)
                .digest("hex");
            // NOTE(review): a fresh id/createdAt per request makes every
            // check look like a new update — confirm this is intended.
            const updateId = crypto.randomUUID();
            const createdAt = new Date().toISOString();
            // This is the NEW manifest format for Expo SDK 50+
            const manifest = {
                id: updateId,
                createdAt: createdAt,
                runtimeVersion: expectedRuntimeVersion,
                launchAsset: {
                    hash: bundleHash,
                    key: bundleFile,
                    contentType: "application/javascript",
                    fileExtension: ".hbc",
                    url: `${baseUrl}/_expo/static/js/android/${bundleFile}`,
                },
                assets: generateAssetManifest(baseUrl),
                metadata: {},
                extra: {
                    expoClient: {
                        name: "LSTScanner",
                        slug: "lst-scanner-app",
                        version: "1.0.0",
                        runtimeVersion: expectedRuntimeVersion,
                    },
                },
            };
            console.log(
                "Returning manifest:",
                JSON.stringify(manifest, null, 2)
            );
            // Headers required by the Expo updates protocol.
            res.setHeader("Content-Type", "application/json");
            res.setHeader("Access-Control-Allow-Origin", "*");
            res.setHeader("expo-protocol-version", "1");
            res.setHeader("expo-sfv-version", "0");
            res.json(manifest);
        } catch (error: any) {
            console.error("Error generating manifest:", error);
            res.status(500).json({
                error: "Failed to generate manifest",
                details: error.message,
                stack: error.stack,
            });
        }
    });

    // Serve the bundle/assets referenced by the manifest URLs above.
    app.use(
        basePath + "/api/mobile/updates",
        express.static(distPath, {
            setHeaders(res, path) {
                res.setHeader("Access-Control-Allow-Origin", "*");
                // Hashed build artifacts are immutable — cache for a year.
                res.setHeader("Cache-Control", "public, max-age=31536000");
                if (path.endsWith(".hbc")) {
                    res.setHeader("Content-Type", "application/javascript");
                }
            },
        })
    );
    // app.use(
    //     basePath + "/api/mobile/updates",
    //     express.static(join(__dirname, mobileDir), {
    //         setHeaders(res) {
    //             // OTA runtime needs to fetch these from the device
    //             console.log("OTA check called");
    //             res.setHeader("Access-Control-Allow-Origin", "*");
    //         },
    //     })
    // );
    // app.get(basePath + "/api/mobile/updates", (req, res) => {
    //     res.redirect(basePath + "/api/mobile/updates/metadata.json");
    // });

    // Simple health-check endpoint for the OTA server.
    app.get(basePath + "/api/mobile", (_, res) =>
        res.status(200).json({ message: "LST OTA server is up." })
    );
};

View File

@@ -4,6 +4,7 @@ import { setupAuthRoutes } from "../auth/routes/routes.js";
import { setupForkliftRoutes } from "../forklifts/routes/routes.js";
import { setupLogisticsRoutes } from "../logistics/routes.js";
import { setupSystemRoutes } from "../system/routes.js";
import { setupMobileRoutes } from "../mobile/route.js";
export const setupRoutes = (app: Express, basePath: string) => {
// all routes
@@ -12,6 +13,7 @@ export const setupRoutes = (app: Express, basePath: string) => {
setupSystemRoutes(app, basePath);
setupLogisticsRoutes(app, basePath);
setupForkliftRoutes(app, basePath);
setupMobileRoutes(app, basePath);
// always try to go to the app whether we are in dev or in production.
app.get(basePath + "/", (req: Request, res: Response) => {

View File

@@ -6,6 +6,7 @@ export default function DMButtons() {
const { settings } = useSettingStore();
const testServers = ["test1", "test2", "test3"];
const plantToken = settings.filter((n) => n.name === "plantToken");
//console.log(plantToken);
return (
<div className="flex flex-row-reverse gap-1">
@@ -13,18 +14,36 @@ export default function DMButtons() {
{/* dev and testserver sees all */}
{testServers.includes(plantToken[0]?.value) && (
<div className="flex flex-row gap-2">
<OrderImport fileType={"abbott"} name={"Abbott truck list"} />
<OrderImport fileType={"energizer"} name={"Energizer Truck List"} />
<OrderImport
fileType={"abbott"}
name={"Abbott truck list"}
/>
<OrderImport
fileType={"energizer"}
name={"Energizer Truck List"}
/>
<ForecastImport fileType={"loreal"} name={"VMI Import"} />
<ForecastImport fileType={"pg"} name={"P&G"} />
<ForecastImport fileType={"energizer"} name={"Energizer Forecast"} />
<ForecastImport
fileType={"energizer"}
name={"Energizer Forecast"}
/>
</div>
)}
{plantToken[0]?.value === "usday1" && (
<div className="flex flex-row gap-2">
<OrderImport fileType={"abbott"} name={"Abbott truck list"} />
<OrderImport fileType={"energizer"} name={"Energizer Truck List"} />
<ForecastImport fileType={"energizer"} name={"Energizer Forecast"} />
<OrderImport
fileType={"abbott"}
name={"Abbott truck list"}
/>
<OrderImport
fileType={"energizer"}
name={"Energizer Truck List"}
/>
<ForecastImport
fileType={"energizer"}
name={"Energizer Forecast"}
/>
</div>
)}
{plantToken[0]?.value === "usflo1" && (

View File

@@ -3,6 +3,7 @@ import { useRef, useState } from "react";
import { toast } from "sonner";
import { Button } from "@/components/ui/button";
import { useAuth } from "@/lib/authClient";
import { useNavigate, useRouterState } from "@tanstack/react-router";
export default function ForecastImport(props: any) {
const fileInputRef: any = useRef(null);
@@ -10,7 +11,16 @@ export default function ForecastImport(props: any) {
//const token = localStorage.getItem("auth_token");
const { session } = useAuth();
//const [fileType, setFileType] = useState("");
const navigate = useNavigate();
const router = useRouterState();
const currentPath = router.location.href;
const importOrders = async (e: any) => {
if (!session || !session.user) {
toast.error("You are allowed to do this unless you are logged in");
navigate({ to: "/login", search: { redirect: currentPath } });
return;
}
const file = e.target.files[0];
if (!file) {
toast.error("Missing file please try again");
@@ -34,7 +44,7 @@ export default function ForecastImport(props: any) {
headers: {
"Content-Type": "multipart/form-data",
},
},
}
);
//console.log("Upload successful:", response.data);
toast.success(response?.data?.message);

View File

@@ -3,13 +3,22 @@ import { useRef, useState } from "react";
import { toast } from "sonner";
import { Button } from "@/components/ui/button";
import { useAuth } from "@/lib/authClient";
import { useNavigate, useRouterState } from "@tanstack/react-router";
export default function OrderImport(props: any) {
const fileInputRef: any = useRef(null);
const [posting, setPosting] = useState(false);
const { session } = useAuth();
const navigate = useNavigate();
const router = useRouterState();
const currentPath = router.location.href;
//const [fileType, setFileType] = useState("");
const importOrders = async (e: any) => {
if (!session || !session.user) {
toast.error("You are allowed to do this unless you are logged in");
navigate({ to: "/login", search: { redirect: currentPath } });
return;
}
const file = e.target.files[0];
if (!file) {
toast.error("Missing file please try again");
@@ -31,7 +40,7 @@ export default function OrderImport(props: any) {
headers: {
"Content-Type": "multipart/form-data",
},
},
}
);
//console.log("Upload successful:", response.data);
toast.success(response?.data?.message);

View File

@@ -0,0 +1 @@
ALTER TABLE "qualityRequest" ADD COLUMN "qualityDurationToInspect" integer;

View File

@@ -0,0 +1 @@
ALTER TABLE "qualityRequest" ADD COLUMN "returnDurationToInspect" integer;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -519,6 +519,20 @@
"when": 1760708711258,
"tag": "0073_bumpy_dust",
"breakpoints": true
},
{
"idx": 74,
"version": "7",
"when": 1762966327361,
"tag": "0074_overconfident_may_parker",
"breakpoints": true
},
{
"idx": 75,
"version": "7",
"when": 1762983466464,
"tag": "0075_tan_unicorn",
"breakpoints": true
}
]
}

View File

@@ -25,6 +25,8 @@ export const qualityRequest = pgTable(
warehouseMovedTo: text("warehouseMovedTo"),
locationMovedTo: text("locationMovedTo"),
durationToMove: integer("durationToMove"),
qualityDurationToInspect: integer("qualityDurationToInspect"),
returnDurationToInspect: integer("returnDurationToInspect"),
locationDropOff: text("locationDropOff"),
palletStatus: integer("palletStatus"),
palletStatusText: text("palletStatusText"),

View File

@@ -0,0 +1,88 @@
import {
addDays,
format,
formatISO,
isBefore,
parseISO,
startOfWeek,
} from "date-fns";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { materialPerDay } from "../../../sqlServer/querys/dataMart/materialPerDay.js";
// Normalize a mixed date value (Date | "YYYY-MM-DD hh:mm:ss" string | epoch
// number) into a Date instance.
function toDate(val: any) {
    if (val instanceof Date) return val;
    // SQL-style "date time" strings need the ISO "T" separator for parseISO.
    return typeof val === "string"
        ? parseISO(val.replace(" ", "T"))
        : new Date(val);
}
/**
 * Collapse daily demand rows into weekly totals per material.
 * Weeks start on Monday; WeeklyDemand is returned as a 2-decimal string.
 */
export function sumByMaterialAndWeek(data: any) {
    // material id -> ISO week-start date -> summed daily demand
    const totals: any = {};
    for (const row of data) {
        const material = String(row.MaterialHumanReadableId);
        const weekStart = formatISO(
            startOfWeek(toDate(row.CalDate), { weekStartsOn: 1 }),
            { representation: "date" },
        );
        totals[material] ??= {};
        totals[material][weekStart] ??= 0;
        // Non-numeric demand values count as 0.
        totals[material][weekStart] += Number(row.DailyMaterialDemand) || 0;
    }
    // Flatten the nested map into one row per material/week.
    const rows = [];
    for (const [material, weeks] of Object.entries(totals)) {
        for (const [weekStart, total] of Object.entries(weeks as any)) {
            rows.push({
                MaterialHumanReadableId: material,
                WeekStart: weekStart,
                WeeklyDemand: Number(total).toFixed(2),
            });
        }
    }
    return rows;
}
/**
 * Pull the per-day material demand for the next 90 days from the production
 * SQL Server and aggregate it into weekly totals per material.
 *
 * Returns { success, message, ... }: on success `data` holds the output of
 * sumByMaterialAndWeek (one row per material/week); on failure `data` is []
 * or an `error` field carries the underlying error.
 */
export default async function materialPerDayCheck() {
    /**
     * Build the query window: today through today + 90 days (yyyy-MM-dd).
     */
    const startDate = format(new Date(Date.now()), "yyyy-MM-dd");
    const endDate = format(addDays(new Date(Date.now()), 90), "yyyy-MM-dd");
    // Substitute the window into the SQL template and execute it.
    // NOTE(review): the dates are generated locally, so the plain string
    // replace is not an injection risk here.
    const { data, error } = (await tryCatch(
        query(
            materialPerDay
                .replace("[startDate]", startDate)
                .replace("[endDate]", endDate),
            "material check",
        ),
    )) as any;
    if (error) {
        // Transport / query-level failure.
        return {
            success: false,
            message: "Error getting the material data",
            error,
        };
    }
    if (!data.success) {
        // Query ran but the wrapper reported a failure.
        return {
            success: false,
            message: data.message,
            data: [],
        };
    }
    // Collapse the daily rows into weekly totals per material.
    return {
        success: true,
        message: "material data",
        data: sumByMaterialAndWeek(data.data),
    };
}

View File

@@ -1,18 +1,17 @@
import { OpenAPIHono } from "@hono/zod-openapi";
import sendemail from "./routes/sendMail.js";
import { tryCatch } from "../../globalUtils/tryCatch.js";
import { db } from "../../../database/dbclient.js";
import { notifications } from "../../../database/schema/notifications.js";
import { tryCatch } from "../../globalUtils/tryCatch.js";
import { createLog } from "../logger/logger.js";
import fifoIndex from "./routes/fifoIndex.js";
import notifyStats from "./routes/getActiveNotifications.js";
import notify from "./routes/getNotifications.js";
import tiTrigger from "./routes/manualTiggerTi.js";
import materialCheck from "./routes/materialPerDay.js";
import blocking from "./routes/qualityBlocking.js";
import sendemail from "./routes/sendMail.js";
import { note, notificationCreate } from "./utils/masterNotifications.js";
import { startNotificationMonitor } from "./utils/processNotifications.js";
import notifyStats from "./routes/getActiveNotifications.js";
import tiTrigger from "./routes/manualTiggerTi.js";
import blocking from "./routes/qualityBlocking.js";
import notify from "./routes/getNotifications.js";
import fifoIndex from "./routes/fifoIndex.js";
const app = new OpenAPIHono();
@@ -23,6 +22,7 @@ const routes = [
blocking,
notify,
fifoIndex,
materialCheck,
] as const;
const appRoutes = routes.forEach((route) => {
@@ -38,7 +38,7 @@ app.all("/notify/*", (c) => {
// check if the mastNotications is changed compared to the db and add if needed.
const { data: notes, error: notesError } = await tryCatch(
db.select().from(notifications)
db.select().from(notifications),
);
if (notesError) {
@@ -47,8 +47,8 @@ if (notesError) {
"notify",
"notify",
`There was an error getting the notifications: ${JSON.stringify(
notesError
)}`
notesError,
)}`,
);
}

View File

@@ -0,0 +1,37 @@
// an external way to creating logs
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import fifoIndexCheck from "../controller/notifications/fifoIndex.js";
import materialPerDayCheck from "../controller/notifications/materialPerDay.js";
// Hono sub-app exposing the material-per-day notification endpoint.
const app = new OpenAPIHono({ strict: false });

app.openapi(
    createRoute({
        tags: ["notify"],
        summary: "",
        method: "get",
        path: "/materialperday",
        //middleware: authMiddleware,
        responses: responses(),
    }),
    async (c) => {
        /**
         * GET /materialperday — record the API hit, run the material-per-day
         * demand check, and return its result unchanged.
         */
        apiHit(c, { endpoint: "/materialperday" });
        /**
         * Weekly material demand aggregated per material
         * (see materialPerDayCheck).
         */
        const checkedData = await materialPerDayCheck();
        return c.json({
            success: checkedData.success,
            message: checkedData.message,
            data: checkedData.data,
        });
    },
);

export default app;

View File

@@ -1,31 +1,32 @@
import { differenceInMinutes } from "date-fns";
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { qualityRequest } from "../../../../database/schema/qualityRequest.js";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { qrequestQuery } from "../../sqlServer/querys/quality/request.js";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
export const addNewPallet = async (data: any, user: string) => {
export const addNewPallet = async (data: any) => {
/**
* Post new pallets
*/
if (parseInt(data.runningNr) <= 0) {
return {
sucess: false,
success: false,
message: "Please add a valid running number.",
};
}
const updateQuery = qrequestQuery.replaceAll(
"[runningNumber]",
data.runningNr
data.runningNr,
);
const { data: c, error: ce } = await tryCatch(
db
.select()
.from(qualityRequest)
.where(eq(qualityRequest.runningNr, data.runningNr))
.where(eq(qualityRequest.runningNr, data.runningNr)),
);
if (ce) {
return {
@@ -37,23 +38,22 @@ export const addNewPallet = async (data: any, user: string) => {
const palletData: any = c;
// if the pallet exist then tell the user to check on it
if (
(palletData && palletData[0]?.palletStatus === 1) ||
palletData[0]?.palletStatus === 4
) {
const pStatus = [1, 4, 6];
if (palletData && pStatus.includes(palletData[0]?.palletStatus)) {
return {
success: false,
message: `Running number ${data.runningNr} is already pending or reactivated please follow up with the warehouse team on status to be moved.`,
};
}
// update the existing pallet if already in the system
if (palletData.length > 0) {
try {
// get the pallet info from stock
const { data: pa, error: pe } = await tryCatch(
query(updateQuery, "quality request")
query(updateQuery, "quality request"),
);
const p: any = pa;
const p: any = pa ? pa.data : [];
if (pe) {
return {
success: false,
@@ -66,19 +66,26 @@ export const addNewPallet = async (data: any, user: string) => {
locationAtRequest: p[0].locationAtRequest,
warehouseMovedTo: null,
locationMovedTo: null,
palletStatus: 4,
durationToMove: 0,
palletStatusText: "reactivated",
palletStatus: data.palletStatusText === "return" ? 6 : 4,
//durationToMove: 0,
palletStatusText:
data.palletStatusText === "return" ? "return" : "reactivated",
qualityDurationToInspect:
data.palletStatusText === "return"
? differenceInMinutes(new Date(Date.now()), p[0].lastMove)
: 0,
locationDropOff:
data.palletStatusText === "return" ? "Return to warhouse" : "",
palletRequest: palletData[0].palletStatus + 1,
upd_user: user,
upd_date: new Date(timeZoneFix()),
upd_user: data.user,
upd_date: sql`NOW()`,
};
const { data: u, error } = await tryCatch(
db
.update(qualityRequest)
.set(pData)
.where(eq(qualityRequest.runningNr, data.runningNr))
.where(eq(qualityRequest.runningNr, data.runningNr)),
);
if (error) {
@@ -97,10 +104,10 @@ export const addNewPallet = async (data: any, user: string) => {
};
}
} catch (error) {
console.log(error);
return {
success: false,
message:
"There was an error updating the pallet in quality request",
message: "There was an error updating the pallet in quality request",
data: error,
};
}
@@ -109,13 +116,14 @@ export const addNewPallet = async (data: any, user: string) => {
// add new pallet
try {
const { data: px, error: pe } = await tryCatch(
query(updateQuery, "quality request")
query(updateQuery, "quality request"),
);
const p: any = px;
const p: any = px ? px.data : [];
if (p.length === 0) {
return {
success: false,
message: `Running Number ${data.runningNr} dose not exist in stock.`,
message: `Running Number ${data.runningNr} dose not exist in stock, please check the running number and try again.`,
};
}
@@ -126,7 +134,7 @@ export const addNewPallet = async (data: any, user: string) => {
data: pe,
};
}
console.log(p);
const nData = {
article: p[0].article,
description: p[0].description,
@@ -138,12 +146,12 @@ export const addNewPallet = async (data: any, user: string) => {
palletStatus: 1,
palletStatusText: "pending",
palletRequest: 1,
add_user: user,
upd_user: user,
add_user: data.user,
upd_user: data.user,
};
const { data: u, error } = await tryCatch(
db.insert(qualityRequest).values(nData)
db.insert(qualityRequest).values(nData).onConflictDoNothing(),
);
if (error) {
@@ -162,6 +170,7 @@ export const addNewPallet = async (data: any, user: string) => {
};
}
} catch (error) {
console.log(error);
return {
success: false,
message: "There was an error adding the pallet in quality request",

View File

@@ -1,20 +1,23 @@
import { eq, inArray } from "drizzle-orm";
import { differenceInMinutes } from "date-fns";
import { eq, inArray, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { qualityRequest } from "../../../../database/schema/qualityRequest.js";
import { delay } from "../../../globalUtils/delay.js";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { qrequestQuery } from "../../sqlServer/querys/quality/request.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { differenceInMinutes } from "date-fns";
import { timeZoneFix } from "../../../globalUtils/timeZoneFix.js";
import { qrequestQuery } from "../../sqlServer/querys/quality/request.js";
export const qualityCycle = async () => {
/**
* Cycles the pallets in the quality request to see what's been moved or changed.
*/
const warehouse = [1, 4, 5];
// pallet request check interval 5min check to start
//setInterval(async () => {
setInterval(
async () => {
// create the date stuff
const currentTime = new Date(Date.now());
@@ -23,7 +26,7 @@ export const qualityCycle = async () => {
db
.select()
.from(qualityRequest)
.where(inArray(qualityRequest.palletStatus, [1, 4, 5]))
.where(inArray(qualityRequest.palletStatus, [1, 4, 5, 6])),
);
if (error) {
@@ -31,7 +34,7 @@ export const qualityCycle = async () => {
"error",
"lst",
"quality",
`There was an error getting quality request data: ${error}`
`There was an error getting quality request data: ${error}`,
);
return {
success: false,
@@ -48,15 +51,15 @@ export const qualityCycle = async () => {
// console.log(lstQData[i]);
//update query with plant token
// change the update the pallet number
// update the pallet number
const qPalletNumber = qrequestQuery.replaceAll(
"[runningNumber]",
lstQData[i].runningNr
lstQData[i].runningNr,
);
const queryData: any = await query(
qPalletNumber,
"Quality update check"
"Quality update check",
);
let prodData: any =
queryData?.data.length === 0 ? [] : queryData.data;
@@ -68,13 +71,24 @@ export const qualityCycle = async () => {
const qDataPost = {
warehouseMovedTo: prodData[0]?.warehouseAtRequest,
locationMovedTo: prodData[0]?.locationAtRequest,
durationToMove: differenceInMinutes(
timeZoneFix(),
lstQData[i].upd_date
),
// how long did it take the warehouse to originally move the pallet
durationToMove: warehouse.includes(lstQData[i].palletStatus)
? differenceInMinutes(
new Date(Date.now()),
lstQData[i].upd_date,
)
: lstQData[i].durationToMove,
// how long did it take warehouse to move the pallet back again
returnDurationToInspect:
lstQData[i].palletStatus === 7
? differenceInMinutes(
new Date(Date.now()),
lstQData[i].upd_date,
)
: lstQData[i].qualityDurationToInspect,
palletStatus: 2,
palletStatusText: "moved",
upd_date: new Date(timeZoneFix()),
upd_date: sql`NOW()`,
upd_user: "LST_System",
};
@@ -87,24 +101,27 @@ export const qualityCycle = async () => {
"info",
"lst",
"quality",
`Pallet ${lstQData[i].runningNr} was updated`
`Pallet ${lstQData[i].runningNr} was updated`,
);
} else {
createLog(
"debug",
"info",
"lst",
"quality",
`Pallet ${
lstQData[i].runningNr
} has not been moved yet it has been pending for ${differenceInMinutes(
timeZoneFix(),
lstQData[i].upd_date
)} min(s)`
new Date(Date.now()),
lstQData[i].upd_date,
)} min(s)`,
);
}
}
await delay(150);
} else {
createLog("debug", "lst", "quality", "nothing to update");
createLog("info", "lst", "quality", "nothing to update");
}
//}, 5 * 60 * 1000); // every 5 min
},
5 * 60 * 1000,
); // every 5 min
};

View File

@@ -10,6 +10,8 @@ export const statusOptions = [
{ name: "removed", uid: "3" },
{ name: "reactivated", uid: "4" },
{ name: "canceled", uid: "5" },
{ name: "return", uid: "6" },
{ name: "readyToReturn", uid: "7" },
];
const app = new OpenAPIHono();

View File

@@ -1,17 +1,17 @@
// an external way to creating logs
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { verify } from "hono/jwt";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { getRequest } from "../controller/getRequests.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { addNewPallet } from "../controller/addNewPallet.js";
import { verify } from "hono/jwt";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { getRequest } from "../controller/getRequests.js";
const app = new OpenAPIHono({ strict: false });
const Body = z.object({
runningNr: z.number().openapi({ example: 1528 }),
moveTo: z.string().openapi({ example: "rework" }),
moveTo: z.string().optional().openapi({ example: "rework" }),
});
app.openapi(
createRoute({
@@ -30,11 +30,11 @@ app.openapi(
responses: responses(),
}),
async (c) => {
const authHeader = c.req.header("Authorization");
const token = authHeader?.split("Bearer ")[1] || "";
// const authHeader = c.req.header("Authorization");
// const token = authHeader?.split("Bearer ")[1] || "";
const payload = await verify(token, process.env.JWT_SECRET!);
const user: any = payload.user;
// const payload = await verify(token, process.env.JWT_SECRET!);
// const user: any = payload.user;
const { data: b, error: e } = await tryCatch(c.req.json());
apiHit(c, { endpoint: "/newrequest", lastBody: b });
@@ -53,9 +53,7 @@ app.openapi(
// });
// }
const { data, error } = await tryCatch(
addNewPallet(body, user?.username)
);
const { data, error } = await tryCatch(addNewPallet(body));
if (error) {
return c.json({
@@ -69,6 +67,6 @@ app.openapi(
message: data?.message,
data: data?.data,
});
}
},
);
export default app;

View File

@@ -0,0 +1,115 @@
/**
 * T-SQL template returning each production lot's material demand split across
 * the shift-based days it spans (days run from @ShiftStartHour 06:00 to the
 * next day's 06:00), as one DailyMaterialDemand row per material per day.
 *
 * Callers must replace the [startDate] / [endDate] placeholder tokens with
 * yyyy-MM-dd strings before executing (see materialPerDayCheck). The Calendar
 * CTE recurses once per day in that range, hence OPTION (MAXRECURSION 0).
 */
export const materialPerDay = `
use [test3_AlplaPROD2.0_Read]
DECLARE @ShiftStartHour INT = 6
declare @startDate nvarchar(max) = '[startDate]'
declare @endDate nvarchar(max) = '[endDate]'
;with Calendar as (
select cast(@startDate as date) CalDate
union all
select dateadd(day,1,CalDate)
from Calendar
where CalDate < @endDate
),
DailySplit AS (
SELECT
-- Lot fields
l.Id AS ProductionLotId,
l.ProductionLotHumanReadableId,
l.ArticleHumanReadableId,
l.ArticleDescription,
l.LocationId,
l.MachineHumanReadableId,
l.MachineDescription,
l.StartDate,
l.FinishDate,
l.ProductionCustomerDescription,
l.ProductionCustomerHumanReadableId,
l.PlannedQuantity,
l.PlannedLoadingUnit,
l.Cavity,
l.Utilisation,
l.TotalMaterialDemand AS LotTotalMaterialDemand,
-- Material fields
m.MaterialHumanReadableId,
m.MaterialDescription,
m.TotalDemand AS LotMaterialTotalDemand,
c.CalDate,
DATEDIFF(SECOND,l.StartDate,l.FinishDate) AS LotDurationSec,
-- build shiftbased 24hour window (e.g. 06:00 → next day06:00)
CASE
WHEN l.StartDate > DATEADD(HOUR,@ShiftStartHour,CAST(c.CalDate AS DATETIME2(7)))
THEN l.StartDate
ELSE DATEADD(HOUR,@ShiftStartHour,CAST(c.CalDate AS DATETIME2(7)))
END AS DayStart,
CASE
WHEN l.FinishDate < DATEADD(SECOND,-0.0000001,
DATEADD(HOUR,@ShiftStartHour,
DATEADD(DAY,1,CAST(c.CalDate AS DATETIME2(7)))))
THEN l.FinishDate
ELSE DATEADD(SECOND,-0.0000001,
DATEADD(HOUR,@ShiftStartHour,
DATEADD(DAY,1,CAST(c.CalDate AS DATETIME2(7)))))
END AS DayEnd
FROM [issueMaterial].[ProductionLot] (nolock) AS l
LEFT JOIN [issueMaterial].[MaterialDemand] (nolock) AS m
ON m.ProductionLotId = l.Id
CROSS JOIN Calendar AS c
WHERE DATEADD(HOUR,@ShiftStartHour,CAST(c.CalDate AS DATETIME2))
< l.FinishDate
AND DATEADD(HOUR,@ShiftStartHour+24,CAST(c.CalDate AS DATETIME2))
> l.StartDate
--and l.[ProductionLotHumanReadableId] = 26364
),
Fraction AS (
SELECT
ds.*,
DATEDIFF(SECOND,ds.DayStart,ds.DayEnd) AS OverlapSec
FROM DailySplit ds
),
Normalized AS (
SELECT
f.*,
f.OverlapSec * 1.0 /
NULLIF(SUM(f.OverlapSec) OVER
(PARTITION BY f.ProductionLotId, f.MaterialHumanReadableId),0)
AS NormalizedFraction
FROM Fraction f
)
SELECT
n.ProductionLotId,
n.ProductionLotHumanReadableId,
n.ArticleHumanReadableId,
n.ArticleDescription,
n.LocationId,
n.MachineHumanReadableId,
n.MachineDescription,
n.StartDate,
n.FinishDate,
n.ProductionCustomerDescription,
n.ProductionCustomerHumanReadableId,
n.PlannedQuantity,
n.PlannedLoadingUnit,
n.Cavity,
n.Utilisation,
n.LotTotalMaterialDemand,
n.MaterialHumanReadableId,
n.MaterialDescription,
n.LotMaterialTotalDemand,
n.CalDate,
n.NormalizedFraction * n.LotMaterialTotalDemand AS DailyMaterialDemand
FROM Normalized n
ORDER BY
n.MaterialHumanReadableId,
n.CalDate,
n.ProductionLotHumanReadableId
OPTION (MAXRECURSION 0);
`;