Compare commits

..

16 Commits

Author SHA1 Message Date
36a805c652 refactor(scripts): create finance bol 2026-02-16 09:40:19 -06:00
460bc3d24a feat(query selector): queryselector from file based vs cp to ts files 2026-02-16 09:40:00 -06:00
ec201fcfb5 refactor(sql): full changes to localhost if on production server 2026-02-16 09:39:35 -06:00
914ad46c43 refactor(sql server): changes to look at localhost if in production 2026-02-16 09:38:55 -06:00
b96c546ed3 refactor(notify): changed to only allow max 100 errors in the email 2026-02-16 09:38:31 -06:00
29b3be41a1 build(notification): fixed fifo index ts errors 2026-02-16 09:38:10 -06:00
16edf58025 refactor(eom): changes to hist inv 2026-02-16 09:37:40 -06:00
775627f215 feat(scanner): tcp scanner connection based on env var no more db stuff 2026-02-16 09:37:14 -06:00
4e70fae69b refactor(api docs): added/changed docs 2026-02-16 09:36:44 -06:00
24dd109a21 fix(commandlog): changes to the log table 2026-02-13 16:08:23 -06:00
38b57a00cc refactor(datamart): article changes to add pet-g 2026-02-13 16:03:26 -06:00
f8070db95f fix(sqlserver): changed to proper pool connection 2026-02-13 16:02:43 -06:00
10e9dc430c fix(notification): limited to 1000 max errors 2026-02-13 15:59:38 -06:00
6b669ccd9c fix(labelinfo): corrected the query on label info for external 2026-02-13 14:51:15 -06:00
d9a10d98a1 refactor(sendmail): change the send mail function from noreply to donotreply 2026-02-13 14:50:44 -06:00
e64dc7c013 refactor(ocp): removed zechetti 2 from this silly thing for now 2026-02-13 14:50:07 -06:00
24 changed files with 903 additions and 338 deletions

View File

@@ -1,6 +1,6 @@
{ {
"version": "1", "version": "1",
"name": "LogisticsSupportTool_API_DOCS", "name": "lstv2",
"type": "collection", "type": "collection",
"ignore": [ "ignore": [
"node_modules", "node_modules",

View File

@@ -1,5 +1,5 @@
vars { vars {
url: http://localhost:4200 url: http://localhost:5500
session_cookie: session_cookie:
urlv2: http://usbow1vms006:3000 urlv2: http://usbow1vms006:3000
jwtV2: jwtV2:

View File

@@ -0,0 +1,24 @@
meta {
name: bookout
type: http
seq: 2
}
post {
url: {{url}}/lst/old/api/logistics/bookout
body: json
auth: none
}
body:json {
{
"runningNr": "1864553",
"reason": "30006"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: logistics
seq: 7
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,24 @@
meta {
name: relocate
type: http
seq: 1
}
post {
url: {{url}}/lst/old/api/logistics/relocate
body: json
auth: inherit
}
body:json {
{
"runningNr": "56121541",
"laneID": "30006"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,24 @@
meta {
name: removeAsWaste
type: http
seq: 3
}
post {
url: {{url}}/lst/old/api/logistics/bookout
body: json
auth: none
}
body:json {
{
"runningNr": "1865027",
"reason": "packer printed premature"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -1,20 +1,20 @@
import { text, pgTable, timestamp, uuid, jsonb } from "drizzle-orm/pg-core"; import { jsonb, pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod"; import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import { z } from "zod"; import { z } from "zod";
export const commandLog = pgTable( export const commandLog = pgTable(
"commandLog", "commandLog",
{ {
commandLog_id: uuid("commandLog_id").defaultRandom().primaryKey(), commandLog_id: uuid("commandLog_id").defaultRandom().primaryKey(),
commandUsed: text("commandUsed").notNull(), commandUsed: text("commandUsed").notNull(),
bodySent: jsonb("bodySent").default([]), bodySent: jsonb("bodySent").default([]),
reasonUsed: text("reasonUsed"), reasonUsed: text("reasonUsed"),
add_at: timestamp("add_Date").defaultNow(), addDate: timestamp("add_Date").defaultNow(),
}, },
(table) => [ (table) => [
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`), // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
// uniqueIndex("role_name").on(table.name), // uniqueIndex("role_name").on(table.name),
] ],
); );
// Schema for inserting a user - can be used to validate API requests // Schema for inserting a user - can be used to validate API requests

View File

@@ -0,0 +1,187 @@
/**
* Using this to make a scanner connection to the server.
*/
import net from "net";
interface QueuedCommand {
command: string;
resolve: (value: string) => void;
reject: (reason?: any) => void;
timeout: NodeJS.Timeout;
}
const STX = "\x02";
const ETX = "\x03";
// const prodIP = process.env.SERVER_IP as string;
// const prodPort = parseInt(process.env.SCANNER_PORT || "50000", 10);
// const scannerID = `${process.env.SCANNER_ID}@`;
//const scannerCommand = "AlplaPRODcmd00000042#000028547"; // top of the picksheet
export class ScannerClient {
  private socket = new net.Socket();
  private connected = false;
  private queue: QueuedCommand[] = [];
  private processing = false;
  // NOTE(review): declared for STX/ETX frame reassembly but never used —
  // handleData currently resolves on every 'data' chunk. Confirm whether the
  // scanner can split a frame across TCP packets before wiring this up.
  private incomingBuffer = "";

  /**
   * @param host      scanner host/IP
   * @param port      scanner TCP port
   * @param scannerId id prefix sent before every command (e.g. "42@")
   */
  constructor(
    private host: string,
    private port: number,
    private scannerId: string,
  ) {
    this.initialize();
  }

  /** Open the TCP connection and wire up the socket event handlers. */
  private initialize() {
    this.socket.connect(this.port, this.host, () => {
      console.info("Connected to scanner");
      this.connected = true;
    });
    this.socket.on("data", (data) => this.handleData(data));
    this.socket.on("close", () => {
      console.log("Scanner connection closed");
      this.connected = false;
    });
    this.socket.on("error", (err) => {
      console.error("Scanner error:", err);
    });
  }

  /**
   * Queue a command for the scanner and resolve with the raw ASCII response.
   * Rejects immediately when the socket is not connected, or after the 5s
   * safety timeout when no response arrives.
   */
  public scan(command: string): Promise<string> {
    if (!this.connected) {
      return Promise.reject("Scanner not connected");
    }
    return new Promise((resolve, reject) => {
      const entry: QueuedCommand = {
        command,
        resolve,
        reject,
        timeout: setTimeout(() => {
          // BUGFIX: remove the timed-out command from the queue. Previously it
          // stayed at queue[0], so processQueue() re-sent the same command
          // forever and any late response resolved the wrong entry.
          const idx = this.queue.indexOf(entry);
          if (idx !== -1) this.queue.splice(idx, 1);
          // only the in-flight (head) command frees the processing slot
          if (idx === 0) this.processing = false;
          reject("Scanner timeout");
          this.processQueue();
        }, 5000), // 5s safety timeout
      };
      this.queue.push(entry);
      this.processQueue();
    });
  }

  /** Sends the head of the queue; strict FIFO — one command in flight at a time. */
  private processQueue() {
    if (this.processing) return;
    if (this.queue.length === 0) return;
    this.processing = true;
    const current = this.queue[0];
    const message = Buffer.from(
      `${STX}${this.scannerId}${current.command}${ETX}`,
      "ascii",
    );
    this.socket.write(message);
  }

  /**
   * Resolves the in-flight command with the received chunk.
   * NOTE(review): assumes one 'data' event == one complete response frame —
   * confirm against the scanner protocol (see incomingBuffer above).
   */
  private handleData(data: Buffer) {
    console.log(
      "ASCII:",
      data
        .toString("ascii")
        .replace(/\x00/g, "") // remove null bytes
        .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
        .trim(),
    );
    const current = this.queue.shift();
    if (current) {
      clearTimeout(current.timeout);
      current.resolve(data.toString("ascii"));
    }
    this.processing = false;
    this.processQueue();
  }
}
// Singleton scanner connection, opened as a side effect of module load.
// NOTE(review): assumes SERVER_IP, SCANNER_PORT and SCANNER_ID are all set;
// a missing SCANNER_PORT makes parseInt return NaN and the connect call
// throw — confirm env validation happens before this module is imported.
export const scanner = new ScannerClient(
  process.env.SERVER_IP!,
  parseInt(process.env.SCANNER_PORT!, 10),
  `${process.env.SCANNER_ID}@`,
);
// export const connectToScanner = () => {
// if (!process.env.SERVER_IP || !process.env.SCANNER_PORT) {
// return {
// success: false,
// message: "Missing ServerIP or ServerPort",
// };
// }
// scanner.connect(prodPort, prodIP, () => {
// console.log("Connected to scanner");
// connected = true;
// });
// };
// export const scan = async (command: string) => {
// if (!connected) {
// return {
// success: false,
// message: "Scanner is not connected, please contact admin",
// };
// }
// if (inScanCommand) {
// bufferCommands.push({ timeStamp: new Date(Date.now()), command: command });
// }
// // we are going to set to scanning
// inScanCommand = true;
// const message = Buffer.from(`${STX}${scannerID}${command}${ETX}`, "ascii");
// scanner.write(message);
// await new Promise((resolve) => setTimeout(resolve, 750));
// inScanCommand = false;
// if (bufferCommands.length > 0) {
// await scan(bufferCommands[0].command);
// bufferCommands.shift();
// }
// return {
// success: true,
// message: "Scan completed",
// };
// };
// scanner.on("data", async (data) => {
// console.log(
// "Response:",
// data
// .toString("ascii")
// .replace(/\x00/g, "") // remove null bytes
// .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
// .trim(),
// );
// });
// scanner.on("close", () => {
// console.log("Connection closed");
// });
// scanner.on("error", (err) => {
// console.error("Scanner error:", err);
// });

View File

@@ -63,10 +63,10 @@ setTimeout(async () => {
// the time we want to run the hostircal data should be the same time the historical data run on the server // the time we want to run the hostircal data should be the same time the historical data run on the server
// getting this from the shift time // getting this from the shift time
if (process.env.NODE_ENV?.trim() !== "production") { //if (process.env.NODE_ENV?.trim() !== "production") {
setTimeout(() => { setTimeout(() => {
historicalInvIMmport(); historicalInvIMmport();
}, 15 * 1000); }, 15 * 1000);
} //}
export default app; export default app;

View File

@@ -0,0 +1,155 @@
import axios from "axios";
import net from "net";
import { db } from "../../../../../database/dbclient.js";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { scanner } from "../../../../globalUtils/scannerConnect.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js";
type Data = {
runningNr: number;
reason: string;
user: string;
};
/**
 * Book out a pallet, identified by its running number, from PPOO.
 *
 * Steps:
 *  1. Validate the free-text reason (minimum 4 characters).
 *  2. Look up the label/inventory info for the running number.
 *  3. Reject when the pallet is not in PPOO, or when it is blocked for
 *     anything other than COA.
 *  4. If it is COA-blocked, release it first via the scanner
 *     (command 89 followed by the label barcode).
 *  5. POST the SSCC to the production BookOut endpoint
 *     (/public/v1.1/Manufacturing/ProductionControlling/BookOut, body
 *     { sscc, scannerId }) and record the command in the commandLog table.
 *
 * NOTE(review): `data.user` is received but never used — confirm whether it
 * should be persisted with the command log entry.
 */
export const bookOutPallet = async (data: Data) => {
  const { runningNr, reason } = data;
  // a reason is mandatory and must carry at least a few characters
  if (!reason || reason.length < 4) {
    return {
      success: false,
      status: 400,
      // BUGFIX: message typo "to short" -> "too short"
      message: "The reason provided is too short",
      data: [],
    };
  }
  // resolve the saved query text used for the inventory lookup
  const queryCheck = sqlQuerySelector("inventoryInfo.query");
  if (!queryCheck.success) {
    return {
      success: false,
      status: 400,
      message: queryCheck.message,
      data: data,
    };
  }
  const { data: label, error: labelError } = (await tryCatch(
    query(
      queryCheck.query!.replace("[runningNr]", `${runningNr}`),
      "labelQuery",
    ),
  )) as any;
  if (labelError) {
    return {
      success: false,
      status: 400,
      message: labelError.message,
      data: labelError,
    };
  }
  // the query filters on warehouse 1, so no rows means the pallet is not in ppoo
  if (label.data.length <= 0) {
    return {
      success: false,
      status: 400,
      message: `${runningNr} is not currently in ppoo, please move to ppoo before trying to book-out`,
      data: [],
    };
  }
  // blocked for anything other than COA must be released by quality first
  if (
    label.data[0].blockingReason &&
    !label.data[0].blockingReason?.includes("COA")
  ) {
    return {
      success: false,
      status: 400,
      message: `${runningNr} is not currently blocked for coa, to get this pallet booked out please take the label to quality to be released then you can book-out.`,
      data: [],
    };
  }
  // COA block: release the label through the scanner before booking out
  if (label.data[0].blockingReason) {
    await scanner.scan("AlplaPRODcmd89");
    await scanner.scan(`${label.data[0].barcode}`);
  }
  // build the production endpoint url and the SSCC payload
  const url = await prodEndpointCreation(
    "/public/v1.1/Manufacturing/ProductionControlling/BookOut",
  );
  const SSCC = await createSSCC(runningNr);
  const bookOutData = {
    // the API expects the SSCC without its 2-character prefix
    sscc: SSCC.slice(2),
    scannerId: "666",
  };
  try {
    const results = await axios.post(url, bookOutData, {
      headers: {
        "X-API-Key": process.env.TEC_API_KEY || "",
        "Content-Type": "application/json",
      },
    });
    if (results.data.Errors) {
      return {
        success: false,
        status: 400,
        message: results.data.Errors.Error.Description,
      };
    }
    // record the command; a logging failure must not fail the book-out,
    // but it should not be silently swallowed either
    const { error: ce } = await tryCatch(
      db.insert(commandLog).values({
        commandUsed: "book out",
        bodySent: data,
        reasonUsed: reason,
      }),
    );
    if (ce) {
      console.error("commandLog insert failed:", ce);
    }
    return {
      success: true,
      message: `${runningNr} was booked out`,
      status: results.status,
    };
  } catch (error: any) {
    console.log(bookOutData);
    return {
      success: false,
      status: 400,
      message: error.response?.data,
      data: error.response?.data,
    };
  }
};

View File

@@ -1,120 +1,50 @@
import axios from "axios";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { lstAuth } from "../../../../index.js";
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { db } from "../../../../../database/dbclient.js"; import { db } from "../../../../../database/dbclient.js";
import net from "net"; import { commandLog } from "../../../../../database/schema/commandLog.js";
import { scanner } from "../../../../globalUtils/scannerConnect.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js"; import { query } from "../../../sqlServer/prodSqlServer.js";
import { labelInfo } from "../../../sqlServer/querys/warehouse/labelInfo.js"; import { labelInfo } from "../../../sqlServer/querys/warehouse/labelInfo.js";
import { settings } from "../../../../../database/schema/settings.js";
import { eq } from "drizzle-orm";
import { serverData } from "../../../../../database/schema/serverData.js";
export const removeAsNonReusable = async (data: any) => { export const removeAsNonReusable = async (data: any) => {
// const removalUrl = await prodEndpointCreation( // get the label info
// "/public/v1.0/Warehousing/RemoveAsNonReusableMaterial" const { data: label, error: labelError } = (await tryCatch(
// ); query(labelInfo.replaceAll("[runningNr]", data.runningNr), "Label Info"),
)) as any;
// const sscc = await createSSCC(data.runningNr); if (label.data[0].stockStatus === "notOnStock") {
return {
success: false,
message: `The label: ${data.runningNr} is not currently in stock`,
data: [],
};
}
// const { data: remove, error } = await tryCatch( if (label.data[0].blockingReason) {
// axios.post( return {
// removalUrl, success: false,
// { scannerId: "500", sscc: sscc.slice(2) }, status: 400,
// { message: `${data.runningNr} is currently blocked, to get this pallet removed please take the label to quality to be released then you can remove.`,
// headers: { Authorization: `Basic ${lstAuth}` }, data: [],
// } };
// ) }
// );
// use a scanner tcp connection to trigger this process await scanner.scan("AlplaPRODcmd23");
const STX = "\x02"; await scanner.scan(`${label.data[0].barcode}`);
const ETX = "\x03";
const scanner = new net.Socket();
let stage = 0;
// get the label info
const { data: label, error: labelError } = (await tryCatch(
query(labelInfo.replaceAll("[runningNr]", data.runningNr), "Label Info")
)) as any;
if (label.data[0].stockStatus === "notOnStock") { let reason = data.reason || "";
return { delete data.reason;
success: false,
message: `The label: ${data.runningNr} is not currently in stock`,
data: [],
};
}
// get the server ip based on the token. const { data: commandL, error: ce } = await tryCatch(
const setting = await db.select().from(settings); db.insert(commandLog).values({
commandUsed: "removeAsNonReusable",
bodySent: data,
reasonUsed: reason,
}),
);
const plantInfo = await db.select().from(serverData); return {
const plantToken = setting.filter((n: any) => n.name === "plantToken"); success: true,
const scannerID = setting.filter((n: any) => n.name === "scannerID"); message: `The label: ${data.runningNr}, was removed`,
const scannerPort = setting.filter((n: any) => n.name === "scannerPort"); data: [],
const plantData = plantInfo.filter( };
(p: any) => p.plantToken === plantToken[0].value
);
scanner.connect(
parseInt(scannerPort[0].value),
plantData[0].idAddress!,
async () => {
// need to get the ip from the server data and scanner port
//console.log(`connected to scanner`);
scanner.write(`${STX}${scannerID[0].value}@AlplaPRODcmd23${ETX}`);
}
);
scanner.on("data", (data) => {
const response = data.toString();
//console.log("Received:", response.trimStart());
if (stage === 0) {
stage = 1;
scanner.write(
`${STX}${scannerID[0].value}@${label.data[0].Barcode}${ETX}`
);
} else if (stage === 1) {
scanner.end();
}
});
scanner.on("close", () => {
//console.log("Connection closed");
scanner.destroy();
});
scanner.on("error", (err) => {
//console.error("Scanner error:", err);
scanner.destroy();
return {
success: false,
message: `The label: ${data.runningNr} encountering an error while being removed, please try again`,
data: [],
};
});
// if (error) {
// //console.log(error);
// return {
// success: false,
// message: `There was an error removing ${data.runningNr}`,
// data: [],
// };
// }
let reason = data.reason || "";
delete data.reason;
const { data: commandL, error: ce } = await tryCatch(
db.insert(commandLog).values({
commandUsed: "removeAsNonReusable",
bodySent: data,
reasonUsed: reason,
})
);
return {
success: true,
message: `The label: ${data.runningNr}, was removed`,
data: [],
};
}; };

View File

@@ -1,7 +1,9 @@
import { OpenAPIHono } from "@hono/zod-openapi"; import { OpenAPIHono } from "@hono/zod-openapi";
import { migrateAdjustments } from "./controller/siloAdjustments/migrateAdjustments.js"; import { migrateAdjustments } from "./controller/siloAdjustments/migrateAdjustments.js";
import { getLanesToCycleCount } from "./controller/warehouse/cycleCountChecks/cyclecountCheck.js"; import { getLanesToCycleCount } from "./controller/warehouse/cycleCountChecks/cyclecountCheck.js";
import attachSilo from "./route/attachSilo.js"; import attachSilo from "./route/attachSilo.js";
import bookOutPallet from "./route/bookout.js";
import comsumeMaterial from "./route/consumeMaterial.js"; import comsumeMaterial from "./route/consumeMaterial.js";
import detachSilo from "./route/detachSilo.js"; import detachSilo from "./route/detachSilo.js";
import postBulkOrders from "./route/dm/bulkOrdersIn.js"; import postBulkOrders from "./route/dm/bulkOrdersIn.js";
@@ -56,6 +58,7 @@ const routes = [
// logisitcs // logisitcs
removeAsNonReable, removeAsNonReable,
getSSCC, getSSCC,
bookOutPallet,
] as const; ] as const;
// app.route("/server", modules); // app.route("/server", modules);

View File

@@ -0,0 +1,87 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { verify } from "hono/jwt";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
//import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { bookOutPallet } from "../controller/commands/bookout.js";
const app = new OpenAPIHono();

// Response shape shared by every documented status code of this route.
const responseSchema = z.object({
  success: z.boolean().optional().openapi({ example: true }),
  message: z.string().optional().openapi({ example: "user access" }),
});

app.openapi(
  createRoute({
    tags: ["logistics"],
    // BUGFIX: summary/description were copy-pasted from the consume-material
    // route; this endpoint books out a pallet.
    summary: "Books out a pallet based on its running number",
    method: "post",
    path: "/bookout",
    //middleware: authMiddleware,
    description:
      "Provided a running number and a reason, the pallet is booked out of ppoo.",
    responses: {
      200: {
        content: { "application/json": { schema: responseSchema } },
        description: "Pallet booked out",
      },
      400: {
        content: { "application/json": { schema: responseSchema } },
        description: "Failed to book out",
      },
      401: {
        content: { "application/json": { schema: responseSchema } },
        description: "Unauthorized",
      },
    },
  }),
  async (c) => {
    // parse the JSON body up front so a malformed payload yields a clean 400
    const { data, error } = await tryCatch(c.req.json());
    if (error) {
      return c.json(
        {
          success: false,
          message: "Missing data please try again",
          error,
        },
        400,
      );
    }
    apiHit(c, { endpoint: "/bookout", lastBody: data });
    //const authHeader = c.req.header("Authorization");
    //const token = authHeader?.split("Bearer ")[1] || "";
    //const payload = await verify(token, process.env.JWT_SECRET!);
    try {
      //const pointData = { ...data, user: payload.user };
      const bookout = await bookOutPallet(data);
      // BUGFIX: propagate failures as 400 instead of always returning 200;
      // the 400 response was documented above but never produced here.
      return c.json(
        {
          success: bookout?.success,
          message: bookout?.message,
          data: bookout.data,
        },
        bookout?.success ? 200 : 400,
      );
    } catch (error) {
      console.log("from error:", error);
      return c.json(
        {
          success: false,
          message: "Missing data please try again",
          error,
        },
        400,
      );
    }
  },
);
export default app;

View File

@@ -1,112 +1,112 @@
import { isBefore } from "date-fns"; import { isBefore } from "date-fns";
import { db } from "../../../../../database/dbclient.js";
import { fifoIndex } from "../../../../../database/schema/fifoIndex.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js"; import { query } from "../../../sqlServer/prodSqlServer.js";
import { currentInv } from "../../../sqlServer/querys/notifications/fifoIndex/currentInv.js"; import { currentInv } from "../../../sqlServer/querys/notifications/fifoIndex/currentInv.js";
import { shippedPallets } from "../../../sqlServer/querys/notifications/fifoIndex/shippedPallets.js"; import { shippedPallets } from "../../../sqlServer/querys/notifications/fifoIndex/shippedPallets.js";
import { db } from "../../../../../database/dbclient.js";
import { fifoIndex } from "../../../../../database/schema/fifoIndex.js";
export default async function fifoIndexCheck() { export default async function fifoIndexCheck() {
/** /**
* getting the shipped pallets * getting the shipped pallets
*/ */
const { data: shipped, error: eShipped } = await tryCatch( const { data: shipped, error: eShipped } = await tryCatch(
query(shippedPallets, "notify shipped pallets") query(shippedPallets, "notify shipped pallets"),
); );
const { data: currentStuff, error: eCurrentInv } = await tryCatch( const { data: currentStuff, error: eCurrentInv } = await tryCatch(
query(currentInv, "notify shipped pallets") query(currentInv, "notify shipped pallets"),
); );
// console.log(shipped?.data[2]); // console.log(shipped?.data[2]);
// console.log(currentStuff?.data[2]); // console.log(currentStuff?.data[2]);
/** /**
* We want to check if the each shippened pallet is out of fifo * We want to check if the each shippened pallet is out of fifo
*/ */
const check = shipped?.data.map((n: any) => { const check: any = shipped?.data.map((n: any) => {
/** /**
* Returns all data so we know if we are in or out. * Returns all data so we know if we are in or out.
*/ */
//check if there are pallets older than the current one we are mapped on. //check if there are pallets older than the current one we are mapped on.
const fifoCheck = currentStuff?.data.filter( const fifoCheck = currentStuff?.data.filter(
(i: any) => isBefore(i.prodDate, n.prodDate) && i.av === n.av (i: any) => isBefore(i.prodDate, n.prodDate) && i.av === n.av,
); ) as any;
//console.log(fifoCheck.length); //console.log(fifoCheck.length);
if (fifoCheck.length > 0) { if (fifoCheck.length > 0) {
// console.log("Out of fifo", { // console.log("Out of fifo", {
// av: n.av, // av: n.av,
// rn: n.runningNr, // rn: n.runningNr,
// fRn: fifoCheck[0].runningNr, // fRn: fifoCheck[0].runningNr,
// dates: [fifoCheck[0].prodDate, n.prodDate], // dates: [fifoCheck[0].prodDate, n.prodDate],
// }); // });
} }
return { return {
...n, ...n,
// currentInv: fifoCheck[0], // currentInv: fifoCheck[0],
fifoFollowed: fifoCheck.length === 0 ? true : false, fifoFollowed: fifoCheck.length === 0 ? true : false,
}; };
}); });
/** /**
* lets see just the av that is our or in * lets see just the av that is our or in
*/ */
const avCheck = (check: any) => { const avCheck = (check: any) => {
/** /**
* This will only return the data based on out of fifo. * This will only return the data based on out of fifo.
*/ */
// check how many times each av showed up // check how many times each av showed up
const avCounts = check.reduce((a: any, c: any) => { const avCounts = check.reduce((a: any, c: any) => {
if (c.fifoFollowed === false) { if (c.fifoFollowed === false) {
const avValue = c.av; const avValue = c.av;
a[avValue] = (a[avValue] || 0) + 1; a[avValue] = (a[avValue] || 0) + 1;
} }
return a; return a;
}, {}); }, {});
// transform them back to an avCount Object // transform them back to an avCount Object
const result = Object.keys(avCounts).map((av) => ({ const result = Object.keys(avCounts).map((av) => ({
av: parseInt(av, 10), av: parseInt(av, 10),
count: avCounts[av], count: avCounts[av],
})); }));
return result; return result;
}; };
const outOfFifo: any = avCheck(check); const outOfFifo: any = avCheck(check);
const totalOut = outOfFifo.reduce((sum: any, c: any) => { const totalOut = outOfFifo.reduce((sum: any, c: any) => {
return sum + c.count; return sum + c.count;
}, 0); }, 0);
/** /**
* add the data to the db * add the data to the db
*/ */
for (let i = 0; i < check.length; i++) { for (let i = 0; i < check!.length; i++) {
const { data: dbInsert, error: dbE } = await tryCatch( const { data: dbInsert, error: dbE } = await tryCatch(
db db
.insert(fifoIndex) .insert(fifoIndex)
.values({ .values({
lot: check[i].lot, lot: check[i].lot,
av: check[i].av, av: check[i].av,
runningNr: check[i].runningNr, runningNr: check[i].runningNr,
prodDate: check[i].prodDate, prodDate: check[i].prodDate,
fifoFollowed: check[i].fifoFollowed, fifoFollowed: check[i].fifoFollowed,
add_Date: check[i].add_Date, add_Date: check[i].add_Date,
}) })
.onConflictDoNothing() .onConflictDoNothing(),
); );
} }
return { return {
success: true, success: true,
message: "Fifo index data", message: "Fifo index data",
data: { data: {
palletsOut: check, palletsOut: check,
totalShipped: shipped?.data.length, totalShipped: shipped?.data.length,
inFifo: shipped?.data.length - totalOut, inFifo: shipped!.data.length - totalOut,
outOfFifoData: outOfFifo, outOfFifoData: outOfFifo,
}, },
}; };
} }

View File

@@ -70,7 +70,7 @@ export default async function tooManyErrors(notifyData: any) {
} ${errorLogData.length > 1 ? "errors" : "error"} in the last ${notifyData.checkInterval} min`, } ${errorLogData.length > 1 ? "errors" : "error"} in the last ${notifyData.checkInterval} min`,
template: "tooManyErrors", template: "tooManyErrors",
context: { context: {
data: errorLogData, data: errorLogData.slice(0, 100),
count: notifyData.notifiySettings.errorCount, count: notifyData.notifiySettings.errorCount,
time: notifyData.checkInterval, time: notifyData.checkInterval,
}, },

View File

@@ -94,7 +94,7 @@ export const sendEmail = async (data: any): Promise<any> => {
} as SMTPTransport.Options); } as SMTPTransport.Options);
// update the from email // update the from email
fromEmail = `noreply@alpla.com`; fromEmail = `donotreply@alpla.com`;
} }
// creating the handlbar options // creating the handlbar options

View File

@@ -1,29 +1,29 @@
import { createPlcMonitor } from "./plcController.js"; import { createPlcMonitor } from "./plcController.js";
export const zechettiConnect = () => { export const zechettiConnect = () => {
const config: any = { const config: any = {
controllers: [ controllers: [
{ {
id: "Zecchetti_1", id: "Zecchetti_1",
ip: "192.168.193.97", ip: "192.168.193.97",
slot: 0, slot: 0,
rpi: 250, rpi: 250,
printerId: 22, // grabbed from 2.0 printerId: 22, // grabbed from 2.0
tags: ["N7[0]"], tags: ["N7[0]"],
}, },
{ // {
id: "Zecchetti_2", // id: "Zecchetti_2",
ip: "192.168.193.111", // ip: "192.168.193.111",
slot: 0, // slot: 0,
rpi: 100, // rpi: 100,
printerId: 23, // printerId: 23,
tags: ["N8[0]"], // tags: ["N8[0]"],
}, // },
], ],
}; };
const monitor = createPlcMonitor(config); const monitor = createPlcMonitor(config);
// Start // Start
monitor.start(); monitor.start();
}; };

View File

@@ -8,7 +8,7 @@ import { createLog } from "../logger/logger.js";
import { serverSettings } from "../server/controller/settings/getSettings.js"; import { serverSettings } from "../server/controller/settings/getSettings.js";
import { prodSqlConfig } from "./utils/prodServerConfig.js"; import { prodSqlConfig } from "./utils/prodServerConfig.js";
let pool: any; let pool: sql.ConnectionPool;
let connected: boolean = false; let connected: boolean = false;
export const initializeProdPool = async () => { export const initializeProdPool = async () => {
if (!installed) { if (!installed) {
@@ -34,7 +34,9 @@ export const initializeProdPool = async () => {
(n: any) => n.name === "dbServer", (n: any) => n.name === "dbServer",
) as any; ) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`); const serverUp = await checkHostnamePort(
`${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`,
);
if (!serverUp) { if (!serverUp) {
createLog( createLog(
@@ -88,7 +90,8 @@ export const initializeProdPool = async () => {
} }
try { try {
const config = await prodSqlConfig(); const config = await prodSqlConfig();
pool = await sql.connect(config!); pool = new sql.ConnectionPool(config!);
await pool.connect();
createLog( createLog(
"info", "info",
@@ -164,7 +167,9 @@ export async function query(queryToRun: string, name: string) {
const dbServer = serverSettings.filter( const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer", (n: any) => n.name === "dbServer",
) as any; ) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`); const serverUp = await checkHostnamePort(
`${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`,
);
if (!serverUp) { if (!serverUp) {
createLog( createLog(

View File

@@ -61,7 +61,8 @@ V_Artikel.ArtikelvariantenTypBez= 'LDPE' or
V_Artikel.ArtikelvariantenTypBez= 'PP' or V_Artikel.ArtikelvariantenTypBez= 'PP' or
V_Artikel.ArtikelvariantenTypBez= 'HDPE' or V_Artikel.ArtikelvariantenTypBez= 'HDPE' or
V_Artikel.ArtikelvariantenTypBez= 'PET' or V_Artikel.ArtikelvariantenTypBez= 'PET' or
V_Artikel.ArtikelvariantenTypBez= 'PET-P' V_Artikel.ArtikelvariantenTypBez= 'PET-P' or
V_Artikel.ArtikelvariantenTypBez= 'PET-G'
THEN 'MM' THEN 'MM'
WHEN WHEN
V_Artikel.ArtikelvariantenTypBez='HDPE-Waste' or V_Artikel.ArtikelvariantenTypBez='HDPE-Waste' or

View File

@@ -0,0 +1,78 @@
/*
Returns label/inventory info for a single running number as long as it is
in stock in PPOO (warehouse 1): pallet/quantity totals, COA-blocked vs held
quantities, lot/machine data, a derived batch code and the label barcode.
To get data for every label, comment out the Lfdnr filter in the WHERE clause.
*/
use AlplaPROD_test1
DECLARE @runningNumber nvarchar(max) = '[runningNr]' -- placeholder; when saved in lst this must stay '[runningNr]' so the app can substitute it
select x.idartikelVarianten as av,
ArtikelVariantenAlias as alias,
x.Lfdnr as runningNumber,
round(sum(EinlagerungsMengeVPKSum),0) as totalPallets,
sum(EinlagerungsMengeSum) as totalPalletQTY,
round(sum(VerfuegbareMengeVPKSum),0) as avaliblePallets,
sum(VerfuegbareMengeSum) as avaliablePalletQTY,
-- blocked quantities split by blocking reason: COA blocks vs everything else
sum(case when c.Description LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as coaPallets,
sum(case when c.Description LIKE '%COA%' then GesperrteMengeSum else 0 end) as coaQTY,
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeVPKSum else 0 end) as heldPallets,
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeSum else 0 end) as heldQTY
,IdProdPlanung as lot
,IdAdressen as addressID,
x.AdressBez as addressDescription
,x.IdLagerAbteilung as locationId
,x.lagerabteilungkurzbez as location
,lot.machine
,produktionsdatummin as productionDate
-- batch = '728' + last digit of the production year + day-of-year + machine number
,'728'
+ RIGHT(CAST(YEAR(produktionsdatummin) AS varchar(4)), 1)
+ CAST(DATEDIFF(DAY, DATEFROMPARTS(YEAR(produktionsdatummin), 1, 1), produktionsdatummin) + 1 AS varchar(3))
+ CAST(lot.machine AS varchar(10)) as batch
,c.Description as blockingReason
,x.Barcode as barcode
--,*
from dbo.[V_LagerPositionenBarcodes] (nolock) x
left join
dbo.T_EtikettenGedruckt as l(nolock) on
x.Lfdnr = l.Lfdnr AND l.Lfdnr > 1
-- only currently-active blocking defects count
left join
(SELECT *
FROM [dbo].[T_BlockingDefects] where Active = 1) as c
on x.IdMainDefect = c.IdBlockingDefect
/*
get lot and machine info from the 2.0 production-scheduling database
*/
left join
(select location as machine,
runningnumber as lot
,planstart
,planend
from [test1_AlplaPROD2.0_Read].[productionScheduling].[ProductionLot] (nolock) x
left join
[test1_AlplaPROD2.0_Read].[masterData].[Machine] (nolock) m on
m.id = x.machineid) as lot on
lot.lot = IdProdPlanung
/*
The filters below will be controlled by the user in Excel; by default
everything is passed over, e.g. IdAdressen = 3
*/
where IdArtikelTyp = 1
and x.IdWarenlager in (1) -- the pallet must be in ppoo
and x.Lfdnr = @runningNumber -- comment this out when you want to get everything
group by x.idartikelVarianten, ArtikelVariantenAlias, c.Description, IdAdressen,
x.AdressBez , x.Lfdnr,
IdProdPlanung
,x.IdLagerAbteilung
,x.lagerabteilungkurzbez
,lot.machine
,produktionsdatummin
,x.Barcode
order by x.IdArtikelVarianten

View File

@@ -29,7 +29,7 @@ left join
alplaprod_test1.dbo.V_LagerPositionenBarcodes (nolock) as l on alplaprod_test1.dbo.V_LagerPositionenBarcodes (nolock) as l on
ext.RunningNumber = l.Lfdnr ext.RunningNumber = l.Lfdnr
WHERE ext.SsccEanRunningNumber IN (@runningNumber) and WHERE ext.RunningNumber IN (@runningNumber) and
ext.RunningNumber NOT IN ( ext.RunningNumber NOT IN (
SELECT RunningNumber FROM [test1_AlplaPROD2.0_Read].[labelling].[InternalLabel] WHERE RunningNumber IN (@runningNumber) SELECT RunningNumber FROM [test1_AlplaPROD2.0_Read].[labelling].[InternalLabel] WHERE RunningNumber IN (@runningNumber)
) )

View File

@@ -4,48 +4,47 @@ import { createLog } from "../../logger/logger.js";
import { serverSettings } from "../../server/controller/settings/getSettings.js"; import { serverSettings } from "../../server/controller/settings/getSettings.js";
export const prodSqlConfig = async () => { export const prodSqlConfig = async () => {
try { try {
//const serverSetting = await db.select().from(settings); //const serverSetting = await db.select().from(settings);
const serverSetting = serverSettings as any; const serverSetting = serverSettings as any;
// create dummy type data // create dummy type data
const server = serverSetting.filter((s: any) => s.name === "dbServer"); const server = serverSetting.filter((s: any) => s.name === "dbServer");
const plantToken = serverSetting.filter( const plantToken = serverSetting.filter(
(s: any) => s.name === "plantToken" (s: any) => s.name === "plantToken",
); );
const dbUser = serverSetting.filter((s: any) => s.name === "dbUser"); const dbUser = serverSetting.filter((s: any) => s.name === "dbUser");
// if erroring out double check the password was actually encoded before saving // if erroring out double check the password was actually encoded before saving
const dbPassword = serverSetting.filter( const dbPassword = serverSetting.filter((s: any) => s.name === "dbPass");
(s: any) => s.name === "dbPass"
);
const sqlConfig = { const sqlConfig = {
server: server[0].value, server:
database: `AlplaPROD_${plantToken[0].value}_cus`, process.env.NODE_ENV !== "development" ? "localhost" : server[0].value,
user: dbUser[0].value, database: `AlplaPROD_${plantToken[0].value}_cus`,
password: atob(dbPassword[0].value), user: dbUser[0].value,
options: { password: atob(dbPassword[0].value),
encrypt: true, options: {
trustServerCertificate: true, encrypt: true,
}, trustServerCertificate: true,
requestTimeout: 90000, // in milliseconds },
pool: { requestTimeout: 90000, // in milliseconds
max: 20, // Maximum number of connections in the pool pool: {
min: 0, // Minimum number of connections in the pool max: 20, // Maximum number of connections in the pool
idleTimeoutMillis: 10000, // How long a connection is allowed to be idle before being released min: 0, // Minimum number of connections in the pool
reapIntervalMillis: 1000, // how often to check for idle resourses to destory idleTimeoutMillis: 10000, // How long a connection is allowed to be idle before being released
acquireTimeoutMillis: 100000, // How long until a complete timeout happens reapIntervalMillis: 1000, // how often to check for idle resourses to destory
}, acquireTimeoutMillis: 100000, // How long until a complete timeout happens
}; },
};
return sqlConfig; return sqlConfig;
} catch (error) { } catch (error) {
createLog( createLog(
"info", "info",
"lst", "lst",
"sqlProd", "sqlProd",
`${JSON.stringify( `${JSON.stringify(
error error,
)} "There was an error getting/setting up the config for the prod sql server."` )} "There was an error getting/setting up the config for the prod sql server."`,
); );
} }
}; };

View File

@@ -0,0 +1,21 @@
import { readFileSync } from "fs";

// Plain identifiers only — blocks path separators and ".." segments so a
// caller-supplied name cannot escape the querys directory.
const SAFE_QUERY_NAME = /^[A-Za-z0-9_-]+$/;

/**
 * Loads a SQL query file from the sibling `querys` directory.
 *
 * @param name - Base name of the `.sql` file (no extension, no path). Names
 *   containing path separators or other unsafe characters are rejected to
 *   prevent path traversal.
 * @returns `{ success: true, query }` with the file contents, or
 *   `{ success: false, message }` when the name is invalid or the file
 *   cannot be read.
 */
export const sqlQuerySelector = (name: string) => {
  // Reject anything that could resolve outside ../querys (e.g. "../secret").
  if (!SAFE_QUERY_NAME.test(name)) {
    return {
      success: false,
      message:
        "Error getting the query file, please make sure you have the correct name.",
    };
  }
  try {
    const queryFile = readFileSync(
      new URL(`../querys/${name}.sql`, import.meta.url),
      "utf8",
    );
    return {
      success: true,
      query: queryFile,
    };
  } catch {
    // Missing file, permissions, etc. — report the same generic failure shape.
    return {
      success: false,
      message:
        "Error getting the query file, please make sure you have the correct name.",
    };
  }
};

View File

@@ -3,53 +3,72 @@ import net from "net";
/** /**
* This uses a kinda fake scanner to mimic the scanning process to a server and creates the bol. * This uses a kinda fake scanner to mimic the scanning process to a server and creates the bol.
*/ */
const prodIP = "10.44.0.26"; const prodIP = "10.204.0.26";
const prodPort = 50001; const prodPort = 50000;
const scannerID = "98@"; const scannerID = "98@";
const scannerCommand = "AlplaPRODcmd00000042#000047909"; // top of the picksheet const scannerCommand = "AlplaPRODcmd00000042#000028547"; // top of the picksheet
const scannerCommand2 = ""; // bottom of the pick sheet const scannerCommand2 = ""; // bottom of the pick sheet
const labels = [ const labels = [
"1000000000000000000000000000000006544320", "1000000000000000000000000000000005566030",
"1000000000000000000000000000000006544280", "1000000000000000000000000000000005544896",
"1000000000000000000000000000000006544410", "1000000000000000000000000000000005544906",
"1000000000000000000000000000000006544490", "1000000000000000000000000000000005544916",
"1000000000000000000000000000000006544450", "1000000000000000000000000000000005544926",
"1000000000000000000000000000000006544520", "1000000000000000000000000000000005544936",
"1000000000000000000000000000000006544590", "1000000000000000000000000000000005544946",
"1000000000000000000000000000000006544560", "1000000000000000000000000000000005544956",
"1000000000000000000000000000000006544860", "1000000000000000000000000000000005544966",
"1000000000000000000000000000000006544830", "1000000000000000000000000000000005544976",
"1000000000000000000000000000000006544930", "1000000000000000000000000000000005544986",
"1000000000000000000000000000000006544890", "1000000000000000000000000000000005544996",
"1000000000000000000000000000000006545100", "1000000000000000000000000000000005545006",
"1000000000000000000000000000000006545060", "1000000000000000000000000000000005545016",
"1000000000000000000000000000000006545270", "1000000000000000000000000000000005545026",
"1000000000000000000000000000000006545220", "1000000000000000000000000000000005545036",
"1000000000000000000000000000000006544990", "1000000000000000000000000000000005545046",
"1000000000000000000000000000000006545040", "1000000000000000000000000000000005545056",
"1000000000000000000000000000000006545520", "1000000000000000000000000000000005545066",
"1000000000000000000000000000000006545490", "1000000000000000000000000000000005545076",
"1000000000000000000000000000000006545450", "1000000000000000000000000000000005545086",
"1000000000000000000000000000000006545560", "1000000000000000000000000000000005545096",
"1000000000000000000000000000000006545760", "1000000000000000000000000000000005545106",
"1000000000000000000000000000000006545640", "1000000000000000000000000000000005545116",
"1000000000000000000000000000000006545690", "1000000000000000000000000000000005545126",
"1000000000000000000000000000000006545620", "1000000000000000000000000000000005545136",
"1000000000000000000000000000000006546450", "1000000000000000000000000000000005545146",
"1000000000000000000000000000000006546500", "1000000000000000000000000000000005545156",
"1000000000000000000000000000000006545940", "1000000000000000000000000000000005545166",
"1000000000000000000000000000000006545900", "1000000000000000000000000000000005545176",
"1000000000000000000000000000000006545850", "1000000000000000000000000000000005545186",
"1000000000000000000000000000000006545820", "1000000000000000000000000000000005544580",
"1000000000000000000000000000000006546530", "1000000000000000000000000000000005544590",
"1000000000000000000000000000000006545330", "1000000000000000000000000000000005544600",
"1000000000000000000000000000000006546090", "1000000000000000000000000000000005544610",
"1000000000000000000000000000000006546220", "1000000000000000000000000000000005544640",
"1000000000000000000000000000000006546120", "1000000000000000000000000000000005544650",
"1000000000000000000000000000000006546140", "1000000000000000000000000000000005544660",
"1000000000000000000000000000000006546260", "1000000000000000000000000000000005544670",
"1000000000000000000000000000000006546310", "1000000000000000000000000000000005544680",
"1000000000000000000000000000000005544690",
"1000000000000000000000000000000005544700",
"1000000000000000000000000000000005544710",
"1000000000000000000000000000000005544720",
"1000000000000000000000000000000005544730",
"1000000000000000000000000000000005544740",
"1000000000000000000000000000000005544750",
"1000000000000000000000000000000005544760",
"1000000000000000000000000000000005544770",
"1000000000000000000000000000000005544780",
"1000000000000000000000000000000005544790",
"1000000000000000000000000000000005544800",
"1000000000000000000000000000000005544810",
"1000000000000000000000000000000005544820",
"1000000000000000000000000000000005544830",
"1000000000000000000000000000000005544840",
"1000000000000000000000000000000005544850",
"1000000000000000000000000000000005544860",
"1000000000000000000000000000000005544870",
]; ];
const STX = "\x02"; const STX = "\x02";
const ETX = "\x03"; const ETX = "\x03";