Compare commits

...

12 Commits

33 changed files with 1119 additions and 342 deletions

View File

@@ -13,8 +13,8 @@ post {
body:json { body:json {
{ {
"runningNr": "1864553", "runningNr": "1865027",
"reason": "30006" "reason": "packer printed premature"
} }
} }

View File

@@ -5,7 +5,7 @@ meta {
} }
post { post {
url: {{url}}/lst/old/api/logistics/bookout url: {{url}}/lst/old/api/logistics/removeasreusable
body: json body: json
auth: none auth: none
} }
@@ -13,8 +13,8 @@ post {
body:json { body:json {
{ {
"runningNr": "1865027", "runningNr": "1865018",
"reason": "packer printed premature" "reason": "validating stockout"
} }
} }

View File

@@ -58,6 +58,8 @@ router.get("/", async (req, res) => {
memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${( memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
used.rss / 1024 / 1024 used.rss / 1024 / 1024
).toFixed(2)} MB`, ).toFixed(2)} MB`,
eomFGPkgSheetVersion: 1, // this is the excel file version when we have a change to the macro we want to grab this
masterMacroFile: 1, // this is the excel file version when we have a change to the macro we want to grab this
}); });
}); });

View File

@@ -1,123 +1,125 @@
import type { Address } from "nodemailer/lib/mailer/index.js";
import type { Transporter } from "nodemailer"; import type { Transporter } from "nodemailer";
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
import type Mail from "nodemailer/lib/mailer/index.js";
import os from "os";
import nodemailer from "nodemailer"; import nodemailer from "nodemailer";
import type Mail from "nodemailer/lib/mailer/index.js";
import type { Address } from "nodemailer/lib/mailer/index.js";
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
import hbs from "nodemailer-express-handlebars";
import os from "os";
import path from "path"; import path from "path";
import { fileURLToPath } from "url"; import { fileURLToPath } from "url";
import { promisify } from "util"; import { promisify } from "util";
import hbs from "nodemailer-express-handlebars";
import { createLogger } from "../../logger/logger.js"; import { createLogger } from "../../logger/logger.js";
interface HandlebarsMailOptions extends Mail.Options { interface HandlebarsMailOptions extends Mail.Options {
template: string; template: string;
context: Record<string, unknown>; context: Record<string, unknown>;
} }
interface EmailData { interface EmailData {
email: string; email: string;
subject: string; subject: string;
template: string; template: string;
context: Record<string, unknown>; context: Record<string, unknown>;
} }
export const sendEmail = async (data: EmailData): Promise<any> => { export const sendEmail = async (data: EmailData): Promise<any> => {
const log = createLogger({ module: "pkg", subModule: "sendMail" }); const log = createLogger({ module: "pkg", subModule: "sendMail" });
let transporter: Transporter; let transporter: Transporter;
let fromEmail: string | Address; let fromEmail: string | Address;
if ( // if (
os.hostname().includes("OLP") && // os.hostname().includes("OLP") &&
process.env.EMAIL_USER && // process.env.EMAIL_USER &&
process.env.EMAIL_PASSWORD // process.env.EMAIL_PASSWORD
) { // ) {
transporter = nodemailer.createTransport({ // transporter = nodemailer.createTransport({
service: "gmail", // service: "gmail",
auth: { // auth: {
user: process.env.EMAIL_USER, // user: process.env.EMAIL_USER,
pass: process.env.EMAIL_PASSWORD, // pass: process.env.EMAIL_PASSWORD,
}, // },
//debug: true, // //debug: true,
}); // });
// update the from email // // update the from email
fromEmail = process.env.EMAIL_USER; // fromEmail = process.env.EMAIL_USER;
} else { // } else {
// convert to the correct plant token. // // convert to the correct plant token.
let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`; //let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`;
//const testServers = ["vms036", "VMS036"]; //const testServers = ["vms036", "VMS036"];
if (os.hostname().includes("VMS036")) { // if (os.hostname().includes("VMS036")) {
host = "USMCD1-smtp.alpla.net"; // host = "USMCD1-smtp.alpla.net";
} // }
// if (plantToken[0].value === "usiow2") { // if (plantToken[0].value === "usiow2") {
// host = "USIOW1-smtp.alpla.net"; // host = "USIOW1-smtp.alpla.net";
// } // }
transporter = nodemailer.createTransport({ transporter = nodemailer.createTransport({
host: host, host: "smtp.azurecomm.net",
port: 25, port: 587,
rejectUnauthorized: false, //rejectUnauthorized: false,
//secure: false, tls: {
// auth: { minVersion: "TLSv1.2",
// user: "alplaprod", },
// pass: "obelix", auth: {
// }, user: "donotreply@mail.alpla.com",
debug: true, pass: process.env.SMTP_PASSWORD,
} as SMTPTransport.Options); },
debug: true,
} as SMTPTransport.Options);
// update the from email // update the from email
fromEmail = `noreply@alpla.com`; fromEmail = `DoNotReply@mail.alpla.com`;
} //}
// creating the handlbar options // creating the handlbar options
const viewPath = path.resolve( const viewPath = path.resolve(
path.dirname(fileURLToPath(import.meta.url)), path.dirname(fileURLToPath(import.meta.url)),
"./views/" "./views/",
); );
const handlebarOptions = { const handlebarOptions = {
viewEngine: { viewEngine: {
extname: ".hbs", extname: ".hbs",
//layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory //layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory
defaultLayout: "", // Specify the default layout defaultLayout: "", // Specify the default layout
partialsDir: viewPath, partialsDir: viewPath,
}, },
viewPath: viewPath, viewPath: viewPath,
extName: ".hbs", // File extension for Handlebars templates extName: ".hbs", // File extension for Handlebars templates
}; };
transporter.use("compile", hbs(handlebarOptions)); transporter.use("compile", hbs(handlebarOptions));
const mailOptions: HandlebarsMailOptions = { const mailOptions: HandlebarsMailOptions = {
from: fromEmail, from: fromEmail,
to: data.email, to: data.email,
subject: data.subject, subject: data.subject,
//text: "You will have a reset token here and only have 30min to click the link before it expires.", //text: "You will have a reset token here and only have 30min to click the link before it expires.",
//html: emailTemplate("BlakesTest", "This is an example with css"), //html: emailTemplate("BlakesTest", "This is an example with css"),
template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs') template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs')
context: data.context, context: data.context,
}; };
// now verify and send the email // now verify and send the email
const sendMailPromise = promisify(transporter.sendMail).bind(transporter); const sendMailPromise = promisify(transporter.sendMail).bind(transporter);
try { try {
// Send email and await the result // Send email and await the result
const info = await sendMailPromise(mailOptions); const info = await sendMailPromise(mailOptions);
log.info(null, `Email was sent to: ${data.email}`); log.info(null, `Email was sent to: ${data.email}`);
return { success: true, message: "Email sent.", data: info }; return { success: true, message: "Email sent.", data: info };
} catch (err) { } catch (err) {
console.log(err); console.log(err);
log.error( log.error(
{ error: err }, { error: err },
`Error sending Email to : ${data.email}` `Error sending Email to : ${data.email}`,
); );
return { success: false, message: "Error sending email.", error: err }; return { success: false, message: "Error sending email.", error: err };
} }
}; };

View File

@@ -59,7 +59,7 @@ export default function Relocate() {
validators={{ validators={{
// We can choose between form-wide and field-specific validators // We can choose between form-wide and field-specific validators
onChange: ({ value }) => onChange: ({ value }) =>
value.length > 2 value.length > 0
? undefined ? undefined
: "Please enter a valid running number", : "Please enter a valid running number",
}} }}
@@ -88,7 +88,7 @@ export default function Relocate() {
validators={{ validators={{
// We can choose between form-wide and field-specific validators // We can choose between form-wide and field-specific validators
onChange: ({ value }) => onChange: ({ value }) =>
value.length > 2 ? undefined : "Please enter a valid lane ID", value.length > 0 ? undefined : "Please enter a valid lane ID",
}} }}
children={(field) => { children={(field) => {
return ( return (

View File

@@ -10,7 +10,8 @@
"dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts", "dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts",
"dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts", "dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts",
"build": "npm run build:server", "build": "npm run build:server",
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y ", "build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y && npm run build:copySql",
"build:copySql": "xcopy server\\services\\sqlServer\\querys\\newQueries dist\\server\\services\\sqlServer\\querys\\newQueries\\ /E /I /Y ",
"build:frontend": "cd frontend && npm run build", "build:frontend": "cd frontend && npm run build",
"build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild", "build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild",
"copy:scripts": "tsx server/scripts/copyScripts.ts", "copy:scripts": "tsx server/scripts/copyScripts.ts",

View File

@@ -37,6 +37,10 @@ export class ScannerClient {
} }
private initialize() { private initialize() {
if (!this.host || !this.port) {
console.log("Host or port is missing");
return;
}
this.socket.connect(this.port, this.host, () => { this.socket.connect(this.port, this.host, () => {
console.info("Connected to scanner"); console.info("Connected to scanner");
this.connected = true; this.connected = true;

View File

@@ -51,6 +51,7 @@ export const psiGetPlanningData = async (
}; };
} }
// TODO: if we are not running planning we no pass the old structure if we are running new planning use the below improved version that makes sure we dont have negative numebrs.
articles = data.data; articles = data.data;
return { return {

View File

@@ -145,7 +145,7 @@ app.openapi(
return c.json({ return c.json({
success: true, success: true,
message: "All Current Active Querys.", message: "All Current Active Querys.",
sheetVersion: 2.8, sheetVersion: 2.8, // TODO: when this gets switched change this
data: current, data: current,
}); });
}, },

View File

@@ -5,6 +5,7 @@ import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js"; import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js"; import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
type Data = { type Data = {
runningNr: number; runningNr: number;
@@ -16,7 +17,32 @@ export const relatePallet = async (data: Data) => {
// console.log(data); // console.log(data);
// create the url to post // create the url to post
const url = await prodEndpointCreation("/public/v1.0/Warehousing/Relocate");
// do we have warehousing turned on?
const { data: feature, error: featureError } = (await tryCatch(
query(
`SELECT [Id]
,[Feature]
,[Enabled]
,[ActivationDate]
FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
"feature switch check",
),
)) as any;
let prodUrl = "/public/v1.0/Warehousing/Relocate";
if (featureError) {
prodUrl = "/public/v1.0/Warehousing/Relocate";
}
if (feature?.data.length > 0) {
prodUrl = "/public/v1.1/Warehousing/Unit/Relocate";
}
// 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel","
// 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"
let url = await prodEndpointCreation(prodUrl);
const SSCC = await createSSCC(runningNr); const SSCC = await createSSCC(runningNr);
const consumeSomething = { const consumeSomething = {
ScannerId: 999, ScannerId: 999,
@@ -40,7 +66,7 @@ export const relatePallet = async (data: Data) => {
}; };
} }
if (results.data.Result !== 0) { if (results.data.Result !== 0 || results.data.data.length <= 0) {
return { return {
success: false, success: false,
message: results.data.Message, message: results.data.Message,

View File

@@ -1,6 +1,7 @@
import axios from "axios"; import axios from "axios";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js"; import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
export const postAdjustment = async (data: any) => { export const postAdjustment = async (data: any) => {
if (data.warehouseId === undefined) { if (data.warehouseId === undefined) {
@@ -35,9 +36,30 @@ export const postAdjustment = async (data: any) => {
quantity: data.quantity, quantity: data.quantity,
}; };
let url = await prodEndpointCreation( // do we have warehousing turned on?
"/public/v1.0/Warehousing/AdjustSiloStockLevel", const { data: feature, error: featureError } = (await tryCatch(
); query(
`SELECT [Id]
,[Feature]
,[Enabled]
,[ActivationDate]
FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
"feature switch check",
),
)) as any;
let prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
if (featureError) {
prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
}
if (feature?.data.length > 0) {
prodUrl = "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel";
}
// 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel","
// 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"
let url = await prodEndpointCreation(prodUrl);
const { data: silo, error } = await tryCatch( const { data: silo, error } = await tryCatch(
axios.post(url, siloAdjustment, { axios.post(url, siloAdjustment, {

View File

@@ -4,95 +4,92 @@ import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js"; import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js"; import { query } from "../../../sqlServer/prodSqlServer.js";
import { sendEmail } from "../sendMail.js";
import { bow2incoming } from "../../../sqlServer/querys/notifications/bow2henkel.js"; import { bow2incoming } from "../../../sqlServer/querys/notifications/bow2henkel.js";
import { sendEmail } from "../sendMail.js";
const notification = async (notifyData: any) => { const notification = async (notifyData: any) => {
/** /**
* Pass the entire notification over * Pass the entire notification over
*/ */
createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`); createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`);
// validate if there are any emails. // validate if there are any emails.
if (notifyData.emails === "") { if (notifyData.emails === "") {
createLog( createLog(
"error", "error",
"reprinting", "reprinting",
"notify", "notify",
`There are no emails set for ${notifyData.name}` `There are no emails set for ${notifyData.name}`,
); );
return; return;
} }
//let labels: Labels[]; //let labels: Labels[];
const { data: l, error: labelError } = await tryCatch( const { data: l, error: labelError } = await tryCatch(
query( query(
bow2incoming.replace( bow2incoming.replace("[time]", notifyData.notifiySettings.processTime),
"[time]", "Label Reprints",
notifyData.notifiySettings.processTime ),
), );
"Label Reprints" const labels: any = l?.data as any;
) if (labelError) {
); createLog(
const labels: any = l?.data as any; "error",
if (labelError) { "reprinting",
createLog( "notify",
"error", `Failed to get the labels: ${labelError}`,
"reprinting", );
"notify", return;
`Failed to get the labels: ${labelError}` }
);
return;
}
if (labels.length > 0) { if (labels.length > 0) {
//send the email :D //send the email :D
const emailSetup = { const emailSetup = {
email: notifyData.emails, email: notifyData.emails,
subject: "Alert! New incoming goods has been received", subject: "Alert! New incoming goods has been received",
template: "bow2IncomingGoods", template: "bow2IncomingGoods",
context: { context: {
items: labels, items: labels,
time: notifyData.notifiySettings.processTime, time: notifyData.notifiySettings.processTime,
}, },
}; };
const sentEmail = await sendEmail(emailSetup); const sentEmail = await sendEmail(emailSetup);
if (!sentEmail.success) { if (!sentEmail.success) {
createLog( createLog(
"error", "error",
"reprinting", "reprinting",
"notify", "notify",
"Failed to send email, will try again on next interval" "Failed to send email, will try again on next interval",
); );
return; return;
} }
// // update the last time we ran and the prod id // // update the last time we ran and the prod id
// const notifUpdate = { // const notifUpdate = {
// prodID: labels[0].IdEtikettenHistorie, // prodID: labels[0].IdEtikettenHistorie,
// lastRan: nowDate(), // lastRan: nowDate(),
// }; // };
// update the last time ran // update the last time ran
const { data, error } = await tryCatch( const { data, error } = await tryCatch(
db db
.update(notifications) .update(notifications)
.set({ .set({
lastRan: sql`NOW()`, lastRan: sql`NOW()`,
notifiySettings: { notifiySettings: {
...notifyData.notifiySettings, ...notifyData.notifiySettings,
prodID: labels[0].IdEtikettenHistorie, prodID: labels[0].IdEtikettenHistorie,
}, },
}) })
.where(eq(notifications.name, notifyData.name)) .where(eq(notifications.name, notifyData.name)),
); );
} else { } else {
return; return;
} }
}; };
export default notification; export default notification;

View File

@@ -0,0 +1,108 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import {
type SqlQuery,
sqlQuerySelector,
} from "../../../sqlServer/utils/querySelector.utils.js";
import { sendEmail } from "../sendMail.js";
export interface Labels {
IdEtikettenHistorie?: number;
}
const notification = async (notifyData: any) => {
/**
* Pass the entire notification over
*/
createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`);
// validate if there are any emails.
if (notifyData.emails === "") {
createLog(
"error",
"reprinting",
"notify",
`There are no emails set for ${notifyData.name}`,
);
return;
}
const cycleCountCheck = sqlQuerySelector("cycleCountCheck.query") as SqlQuery;
if (!cycleCountCheck.success) {
console.log("Failed to load the query: ", cycleCountCheck.message);
return;
}
const { data: c, error: cError } = await tryCatch(
query(
cycleCountCheck.query.replace("[timeTest]", notifyData.checkInterval),
"Cycle count check",
),
);
const cycle: any = c?.data ?? ([] as any);
//console.log(cycle);
if (cError) {
createLog(
"error",
"reprinting",
"notify",
`Failed to get the labels: ${cError}`,
);
return;
}
if (cycle.length > 0) {
//send the email :D
const emailSetup = {
email: notifyData.emails,
subject: `Alert! RowBlocked for more than ${notifyData.checkInterval} min(s)`,
template: "cycleCountCheck",
context: {
checkTime: notifyData.checkInterval,
items: cycle,
},
};
const sentEmail = await sendEmail(emailSetup);
if (!sentEmail.success) {
createLog(
"error",
"reprinting",
"notify",
"Failed to send email, will try again on next interval",
);
return;
}
// // update the last time we ran and the prod id
// const notifUpdate = {
// prodID: labels[0].IdEtikettenHistorie,
// lastRan: nowDate(),
// };
// update the last time ran
const { data, error } = await tryCatch(
db
.update(notifications)
.set({
lastRan: sql`NOW()`,
// notifiySettings: {
// ...notifyData.notifiySettings,
// prodID: labels[0].IdEtikettenHistorie,
// },
})
.where(eq(notifications.name, notifyData.name)),
);
} else {
return;
}
};
export default notification;

View File

@@ -0,0 +1,183 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js";
import { sendEmail } from "../sendMail.js";
let running = false;
export default async function platToPlantEdi(notifyData: any) {
createLog("info", "plantToPlant", "notify", `monitoring ${notifyData.name}`);
if (running) {
createLog(
"info",
"plantToPlant",
"notify",
`Notifcation ${notifyData.name} is already running skipping`,
);
return;
}
running = true;
const { data: noti, error: notiError } = (await tryCatch(
db
.select()
.from(notifications)
.where(eq(notifications.name, notifyData.name)),
)) as any;
if (notiError) {
createLog(
"error",
"edi",
"notify",
"Error in getting the notification data",
);
}
// get the default emails they can be blank if as we will only add these to the end of the email from the full flow
let emails = noti[0]?.email ?? "";
const checkBol = sqlQuerySelector("checkBol.query");
if (!checkBol.success) {
createLog("error", "edi", "notify", "Error in getting the bol query data");
}
const pLinkedB = sqlQuerySelector("palletsLinkedToBol.query");
if (!pLinkedB.success) {
createLog("error", "edi", "notify", "Error in getting the bol query data");
}
let ignoreBols: string[] = noti[0]?.notifiySettings?.processedBol ?? [];
const joinBols = ignoreBols.join(",");
let updateQuery = noti[0]?.notifiySettings?.includeAll
? checkBol?.query?.replace(
"and a.bezeichnung like '%Alpla%'",
"--and a.bezeichnung like '%Alpla%'",
)
: checkBol?.query;
const { data: b, error: bError } = (await tryCatch(
query(
updateQuery
?.replace("[timeCheck]", noti[0]?.checkInterval ?? "30")
.replace("[ignoreBols]", joinBols ?? 500) ?? "",
"Check bol",
),
)) as any;
if (bError) {
return {
success: false,
message: "Error getting newly created bols",
data: bError,
};
}
const planedByBol = new Map<string, string[]>();
for (const row of b.data) {
if (!planedByBol.has(row.bol)) {
planedByBol.set(row.bol, []);
}
planedByBol.get(row.bol)!.push(String(row.idladeplanung));
}
if (b.data.length > 0) {
// loop each bol in the system and get the bols only
for (const [bolNumber, idList] of planedByBol.entries()) {
//for (const bol of b.data) {
// run the process to get the the pallet numbers
const joinedIdLadeplanung = idList.join(",");
//console.log("BOL:", bolNumber);
//console.log("IDLadeplanung string:", joinedIdLadeplanung);
//console.log("IgnoreBols: ", joinBols);
// now get the pallets that are witing the ladeplanning
const { data: pallets, error: pError } = await tryCatch(
query(
pLinkedB?.query?.replace(
"[palLinkedToBol]",
joinedIdLadeplanung ?? "0",
) ?? "",
"Get Pallets linked in the bol",
),
);
//console.log(pallets);
// console.log("Address: ", b.data[0].addressId ?? "0");
if (b.data[0].addressId === "") return;
ignoreBols.push(bolNumber);
if (ignoreBols.length > 15) {
ignoreBols.splice(0, ignoreBols.length - 15);
}
// get the email address.
const checkBol = sqlQuerySelector("addressInfo.query");
const { data: address, error: aError } = (await tryCatch(
query(
checkBol?.query?.replace(
"[customerAddress]",
b.data[0].addressId ?? "0",
) ?? "",
"Get Pallets linked in the bol",
),
)) as any;
if (noti[0]?.emails === "") return; // no default emails
// setup the email to be sent :D
const emailSetup = {
email: `${noti[0]?.emails};${address.data[0].email ?? ""}`,
subject: `New EDI transfer Created for BOL: ${bolNumber}`,
template: "plantToPlantEdi",
context: {
items: pallets?.data ?? [],
bol: bolNumber,
//secondarySetting: notifyData.notifiySettings,
},
};
// send the email
await sendEmail(emailSetup);
// add the bols to be ignored
await db
.update(notifications)
.set({
lastRan: sql`NOW()`,
notifiySettings: {
...noti[0]?.notifiySettings,
processedBol: ignoreBols,
},
})
.where(eq(notifications.name, notifyData.name));
}
running = false;
return {
success: true,
message: "All bols have been processed",
data: [ignoreBols],
};
}
running = false;
return {
success: true,
message: "No new bols have been created",
data: [],
};
}

View File

@@ -45,57 +45,57 @@ export const sendEmail = async (data: any): Promise<any> => {
}; };
} }
// get the plantToken // get the plantToken
const server = settingData.filter((n) => n.name === "server"); //const server = settingData.filter((n) => n.name === "server");
if ( // if (
server[0].value === "localhostx" && // server[0].value === "localhostx" &&
process.env.EMAIL_USER && // process.env.EMAIL_USER &&
process.env.EMAIL_PASSWORD // process.env.EMAIL_PASSWORD
) { // ) {
transporter = nodemailer.createTransport({ // transporter = nodemailer.createTransport({
service: "gmail", // service: "gmail",
host: "smtp.gmail.com", // host: "smtp.gmail.com",
port: 465, // port: 465,
auth: { // auth: {
user: process.env.EMAIL_USER, // user: process.env.EMAIL_USER,
pass: process.env.EMAIL_PASSWORD, // pass: process.env.EMAIL_PASSWORD,
}, // },
//debug: true, // //debug: true,
}); // });
// update the from email // // update the from email
fromEmail = process.env.EMAIL_USER; // fromEmail = process.env.EMAIL_USER;
} else { //} else {
// convert to the correct plant token. // convert to the correct plant token.
const plantToken = settingData.filter((s) => s.name === "plantToken"); //const plantToken = settingData.filter((s) => s.name === "plantToken");
let host = `${plantToken[0].value}-smtp.alpla.net`; // let host = `${plantToken[0].value}-smtp.alpla.net`;
const testServers = ["test1", "test2", "test3"]; // const testServers = ["test1", "test2", "test3"];
if (testServers.includes(plantToken[0].value)) { // if (testServers.includes(plantToken[0].value)) {
host = "USMCD1-smtp.alpla.net"; // host = "USMCD1-smtp.alpla.net";
} // }
if (plantToken[0].value === "usiow2") { // if (plantToken[0].value === "usiow2") {
host = "USIOW1-smtp.alpla.net"; // host = "USIOW1-smtp.alpla.net";
} // }
transporter = nodemailer.createTransport({ transporter = nodemailer.createTransport({
host: host, host: "smtp.azurecomm.net",
port: 25, port: 587,
rejectUnauthorized: false, //rejectUnauthorized: false,
//secure: false, tls: {
// auth: { minVersion: "TLSv1.2",
// user: "alplaprod", },
// pass: "obelix", auth: {
// }, user: "donotreply@mail.alpla.com",
debug: true, pass: process.env.SMTP_PASSWORD,
} as SMTPTransport.Options); },
debug: true,
// update the from email });
fromEmail = `donotreply@alpla.com`; fromEmail = `DoNotReply@mail.alpla.com`;
} //}
// creating the handlbar options // creating the handlbar options
const viewPath = path.resolve( const viewPath = path.resolve(

View File

@@ -12,6 +12,7 @@ import blocking from "./routes/qualityBlocking.js";
import sendemail from "./routes/sendMail.js"; import sendemail from "./routes/sendMail.js";
import errorHandling from "./routes/tooManyErrors.js"; import errorHandling from "./routes/tooManyErrors.js";
import { note, notificationCreate } from "./utils/masterNotifications.js"; import { note, notificationCreate } from "./utils/masterNotifications.js";
import { sqlJobCleanUp } from "./utils/notificationSqlCleanup.js";
import { startNotificationMonitor } from "./utils/processNotifications.js"; import { startNotificationMonitor } from "./utils/processNotifications.js";
const app = new OpenAPIHono(); const app = new OpenAPIHono();
@@ -57,6 +58,7 @@ if (notesError) {
setTimeout(() => { setTimeout(() => {
notificationCreate(); notificationCreate();
startNotificationMonitor(); startNotificationMonitor();
sqlJobCleanUp();
}, 5 * 1000); }, 5 * 1000);
export default app; export default app;

View File

@@ -152,6 +152,28 @@ export const note: any = [
errorCount: 10, // change this to something else or leave blank to use the av type errorCount: 10, // change this to something else or leave blank to use the av type
}, },
}, },
{
name: "cycleCountCheck",
description:
"Checks if a cycle count has been active for longer than the defined time.",
checkInterval: 60,
timeType: "min",
emails: "",
active: false,
notifiySettings: {
errorCount: 10, // change this to something else or leave blank to use the av type
},
},
{
name: "platToPlantEdi",
description:
"This is the plant to plant edi that will send an edi to the email once it ships, the emails will be for the receiving plants",
checkInterval: 15,
timeType: "min",
emails: "blake.matthes@alpla.com;Maritza.Hernandez@alpla.com",
active: false,
notifiySettings: { processedBol: [500], includeAll: false },
},
]; ];
export const notificationCreate = async () => { export const notificationCreate = async () => {

View File

@@ -0,0 +1,86 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import {
type SqlQuery,
sqlQuerySelector,
} from "../../sqlServer/utils/querySelector.utils.js";
const cleanUpQuery = `
DECLARE @JobName varchar(max) = '[jobName]'
UPDATE msdb.dbo.sysjobs
SET enabled = 0
WHERE name = @JobName;
`;
// disable the jobs
const jobNames: string[] = [
"monitor_$_lots",
"monitor_$_lots_2",
"monitor$lots",
"Monitor_APO", //listen for people to cry this is no longer a thing
"Monitor_APO2",
"Monitor_AutoConsumeMaterials", // TODO: migrate to lst
"Monitor_AutoConsumeMaterials_iow1",
"Monitor_AutoConsumeMaterials_iow2",
"Monitor_BlockedINV_Loc",
"monitor_inv_cycle",
"monitor_inv_cycle_1",
"monitor_inv_cycle_2",
"monitor_edi_import", // TODO: migrate to lst -- for the query select count(*) from AlplaPROD_test3.dbo.T_EDIDokumente (nolock) where /* IdLieferant > 1 and */ add_date > DATEADD(MINUTE, -30, getdate())
"Monitor_Lot_Progression",
"Monitor_Lots", // TODO: migrate to lst -- this should be the one where we monitor the when a lot is assigned if its missing some data.
"Monitor_MinMax", // TODO:Migrate to lst
"Monitor_MinMax_iow2",
"Monitor_PM",
"Monitor_Purity",
"monitor_wastebookings", // TODO: Migrate
"LastPriceUpdate", // not even sure what this is
"GETLabelsCount", // seems like an old jc job
"jobforpuritycount", // was not even working correctly
"Monitor_EmptyAutoConsumLocations", // not sure who uses this one
"monitor_labelreprint", // Migrated but need to find out who really wants this
"test", // not even sure why this is active
"UpdateLastMoldUsed", // old jc inserts data into a table but not sure what its used for not linked to any other alert
"UpdateWhsePositions3", // old jc inserts data into a table but not sure what its used for not linked to any other alert
"UpdateWhsePositions4",
"delete_print", // i think this was in here for when we was having lag prints in iowa1
"INV_WHSE_1", // something random i wrote long time ago looks like an inv thing to see aged stuff
"INV_WHSE_2",
"laneAgeCheck", // another strange one thats been since moved to lst
"monitor_blocking_2",
"monitor_blocking", // already in lst
"monitor_min_inv", // do we still want this one? it has a description of: this checks m-f the min inventory of materials based on the min level set in stock
"Monitor_MixedLocations",
"Monitor_PM",
"Monitor_PM2",
"wrong_lots_1",
"wrong_lots_2",
"invenotry check", // spelling error one of my stupids
"monitor_hold_monitor",
"Monitor_Silo_adjustments",
"monitor_qualityLocMonitor", // validating with lima this is still needed
];
/**
 * Disables legacy SQL Agent jobs that have been migrated to LST.
 *
 * Loads the `disableJob` query template once, then runs it for every entry in
 * `jobNames`, substituting the `[jobName]` placeholder. A failure for one job
 * is logged and does not stop the remaining jobs from being processed.
 */
export const sqlJobCleanUp = async () => {
  // Load the disable-job query template; bail out early if it cannot be read.
  // NOTE(review): the selector appends ".sql" — confirm the file on disk is
  // actually named "disableJob.query.sql".
  const sqlQuery = sqlQuerySelector("disableJob.query") as SqlQuery;
  if (!sqlQuery.success) {
    console.log("Failed to load the query: ", sqlQuery.message);
    return;
  }
  for (const job of jobNames) {
    // Job names come from the hard-coded list above, so splicing them into
    // the SQL text is acceptable here; never pass user input through this.
    const { error } = await tryCatch(
      query(sqlQuery.query.replace("[jobName]", job), `Disabling job: ${job}`),
    );
    if (error) {
      console.log(error);
    }
  }
};

View File

@@ -0,0 +1,44 @@
{{!-- Email template: lists cycle counts that have been in progress longer
     than the configured threshold.
     Context variables:
       checkTime -- threshold in minutes
       items     -- rows with idWarehouse, warehouse, locationId, location,
                    cycleCountStartAt, blockedBy --}}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
{{!-- "styles" partial supplies the shared email styling. --}}
{{> styles}}
</head>
<body>
<p>All,</p>
<p>The below are cycle counts that have been in progress for longer than {{checkTime}} min(s). </p>
<table >
<thead>
<tr>
<th>WarehouseID</th>
<th>Warehouse</th>
<th>LocationID</th>
<th>Location</th>
<th>Cycle count Started</th>
<th>Started by</th>
{{!-- <th>Downtime finish</th> --}}
</tr>
</thead>
<tbody>
{{!-- One table row per overdue cycle count. --}}
{{#each items}}
<tr>
<td>{{idWarehouse}}</td>
<td>{{warehouse}}</td>
<td>{{locationId}}</td>
<td>{{location}}</td>
<td>{{cycleCountStartAt}}</td>
<td>{{blockedBy}}</td>
{{!-- <td>{{dtEnd}}</td> --}}
</tr>
{{/each}}
</tbody>
</table>
<div>
<p>Thank you,</p>
<p>LST Team</p>
</div>
</body>
</html>

View File

@@ -0,0 +1,46 @@
{{!-- Email template: announces a newly created plant-to-plant BOL and lists
     the pallets that must be imported via incoming goods.
     Context variables:
       bol   -- BOL number
       items -- rows with runningNr, article, alias, lotNumber, qty --}}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
{{!-- "styles" partial supplies the shared email styling. --}}
{{> styles}}
</head>
<body>
<p>All,</p>
<p>BOL: {{bol}} was created with the below pallets.</p>
<p>Please head to stock and import the pallets via the normal incoming goods process (now/immediately).</p>
<p>When encountering a discrepancy in pallets/cages received, please correct this after the pallets have been imported.</p>
<p>Due to these being plant to plant shipments, the only way to correct this is to bring them in then undo the incoming goods process.</p>
{{!-- br is a void element; the original <br></br> was invalid HTML and could
     render as two breaks in legacy parsers. --}}
<br>
<table >
<thead>
<tr>
<th>Running Number</th>
<th>AV</th>
<th>Description</th>
<th>Lot number</th>
<th>Quantity</th>
{{!-- <th>Downtime finish</th> --}}
</tr>
</thead>
<tbody>
{{!-- One table row per pallet on the BOL. --}}
{{#each items}}
<tr>
<td>{{runningNr}}</td>
<td>{{article}}</td>
<td>{{alias}}</td>
<td>{{lotNumber}}</td>
<td>{{qty}}</td>
{{!-- <td>{{dtEnd}}</td> --}}
</tr>
{{/each}}
</tbody>
</table>
<div>
<p>Thank you,</p>
<p>LST Team</p>
</div>
</body>
</html>

View File

@@ -41,7 +41,7 @@ export const qualityCycle = async () => {
message: "There was an error getting quality request data", message: "There was an error getting quality request data",
}; };
} }
const lstQData: any = data; const lstQData: any = data ?? [];
// get the pallets that currentStat is moved // get the pallets that currentStat is moved
// const res = await runQuery(palletMoveCheck, "palletCheck"); // const res = await runQuery(palletMoveCheck, "palletCheck");

View File

@@ -167,23 +167,23 @@ export async function query(queryToRun: string, name: string) {
const dbServer = serverSettings.filter( const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer", (n: any) => n.name === "dbServer",
) as any; ) as any;
const serverUp = await checkHostnamePort( // const serverUp = await checkHostnamePort(
`${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`, // `${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`,
); // );
if (!serverUp) { // if (!serverUp) {
createLog( // createLog(
"error", // "error",
"lst", // "lst",
"server", // "server",
`The sql ${dbServer[0].value} is not reachable`, // `Failed to run query due to ${dbServer[0].value} not being reachable.`,
); // );
return { // return {
success: false, // success: false,
message: `The sql ${dbServer[0].value} is not reachable`, // message: `Failed to run query due to ${dbServer[0].value} not being reachable.`,
data: [], // data: [],
}; // };
} // }
if (!connected) { if (!connected) {
createLog( createLog(

View File

@@ -0,0 +1,14 @@
-- Look up contact/shipping details for a single master-data address by its
-- human-readable id. The [customerAddress] token is substituted by the
-- caller before the query is executed.
use [test1_AlplaPROD2.0_Read]
select
humanreadableId as addressId
,ContactEmail as email
,ContactName
,ContactPhoneNumber
,Name
,Street
,City
,ZipCode
--,*
from [masterData].[Address] (nolock)
where humanreadableid = [customerAddress]

View File

@@ -0,0 +1,43 @@
use AlplaPROD_test1
/**
Check whether any new Alpla-addressed BOLs were created recently.
Placeholders substituted by the caller before execution:
  [timeCheck]  -- look-back window in minutes
  [ignoreBols] -- comma-separated list of BOL numbers already processed
NOTE(review): idjournalStatus = 62 appears to be the "created/loaded"
status -- confirm against the status table before changing.
*/
SELECT
x.idladeplanung
,e.idjournal
,e.journalNummer as bol
,e.idjournalstatus
,e.ladeDatum as loadDate
,e.bemerkung
,e.ereporting_idwerk
,e.journalDatum
,a.idadressen as addressId
,a.bezeichnung as addressDescription
,a.strasse as streetAddress
,a.ort as cityState
,a.plz as zipcode
,idauftrag as releaseNumber
--,*
FROM [dbo].[T_EAIJournal] as e with (nolock)
-- pull in the address so we only pull in florence data
left join
[dbo].[T_EAIJournalAdresse] as a with (nolock) on
a.[IdJournalAdresse] = [IdJournalKundenAdresse]
-- get the table to link the pallets to the bol
left join
[dbo].[T_EAIJournalPosition] as x with (nolock) on
x.idjournal = e.idjournal
where idjournalStatus = 62
--and idadressen = 270
and a.bezeichnung like '%Alpla%' -- we only want to monitor for addresses that are linked to alpla.
-- NOTE(review): filtering the left-joined address in WHERE makes this behave
-- like an inner join -- presumably intentional.
and JournalDatum > DATEADD(MINUTE, -[timeCheck], GETDATE())
and e.journalNummer not in ([ignoreBols])
and idauftrag > 1 -- this will ignore all incoming goods as we are really only looking for outbound deliveries
order by JournalDatum desc

View File

@@ -0,0 +1,33 @@
/*
Checks that an in-progress cycle count's age does not exceed x time.
Returns one row per warehouse location whose cycle count started more than
[timeTest] minutes ago; the [timeTest] token is substituted by the caller.
*/
use AlplaPROD_test1
DECLARE @timeCheck INT = [timeTest]
select
w.IdWarenLager as idWarehouse
,w.KurzBezeichnung as warehouse
,b.IdLagerAbteilung as locationId
,x.KurzBezeichnung as 'location'
--,case when b.upd_date < Dateadd(minute, -(@timeCheck * 1.5), getdate()) then 'OVERDUE' else 'In-Progress' end as invStatus
,format(b.Upd_Date, 'M/d/yyyy HH:mm') as cycleCountStartAt
,b.Upd_User as blockedBy
--,*
from [dbo].[V_LagerAbteilungenInventuren] (nolock) as b
-- get the location name
left join
dbo.T_LagerAbteilungen (nolock) as x
on x.IdLagerAbteilung = b.IdLagerAbteilung
-- get the warehouse
left join
dbo.T_WarenLager (nolock) as w
on x.idWarenLager = w.idWarenLager
-- status = 1 presumably means the count is still in progress -- confirm
where status = 1
and b.Upd_Date < Dateadd(minute, -@timeCheck, getdate())

View File

@@ -0,0 +1,8 @@
/*
Disables a SQL Agent job by name via msdb. The [jobName] token is substituted
by the caller (see sqlJobCleanUp); since the value is spliced directly into
the statement text, only trusted, hard-coded job names must ever be used.
*/
EXEC msdb.dbo.sp_update_job @job_name = N'[jobName]', @enabled = 0;
-- Earlier approach kept for reference: direct update of msdb.dbo.sysjobs.
-- DECLARE @JobName varchar(max) = '[jobName]'
-- UPDATE msdb.dbo.sysjobs
-- SET enabled = 0
-- WHERE name = @JobName;

View File

@@ -0,0 +1,37 @@
use AlplaPROD_test1
-- Pull the delivery positions (pallets) linked to a set of load plans/BOLs.
-- The [palLinkedToBol] token is replaced by the caller with a comma-separated
-- list of idladeplanung values.
-- NOTE(review): the label join is on RunningNumber and may produce duplicate
-- rows per pallet; the commented-out ROW_NUMBER/RowNum filter suggests this
-- was observed -- confirm before relying on row counts.
-- NOTE(review): [Add_User] is aliased as scanDate -- looks like a mixed-up
-- alias; verify against consumers of this result set.
select * from (SELECT
p.[IdLadePlanung]
,p.[Beleg] as lotNumber
,p.[LfdNrJeArtikelKunde] as runningNr
,p.[Barcode]
,p.[ProduktionsDatum] as productionDate
,p.[Add_User] as scanDate
,p.[Add_Date]
,p.[Upd_User]
,p.[Upd_Date]
,p.[IdJournalWarenPosition]
,p.[LieferMenge] as qty
-- ,av.IdArtikelvarianten as article
-- ,av.Bezeichnung as alias
,av.articlehumanreadableid as article
,av.ArticleDescription as alias
--,[SSCC_ReserveZiffer]
--,ROW_NUMBER() OVER (PARTITION BY p.[LfdNrJeArtikelKunde] ORDER BY p.upd_date DESC) AS RowNum
--,*
FROM [dbo].[T_EAIJournalLieferPosition] as p (nolock)
-- left join
-- dbo.T_ProdPlanung as l on
-- l.IdProdPlanung = p.Beleg
left join
[test1_AlplaPROD2.0_Read].labelling.InternalLabel as av on
av.RunningNumber = p.[LfdNrJeArtikelKunde]
) as a
where idladeplanung in ([palLinkedToBol])
--and RowNum = 1
order by runningNr

View File

@@ -1,14 +1,21 @@
import { readFileSync } from "fs"; import { readFileSync } from "fs";
export type SqlQuery = {
query: string;
success: boolean;
message: string;
};
export const sqlQuerySelector = (name: string) => { export const sqlQuerySelector = (name: string) => {
try { try {
const queryFile = readFileSync( const queryFile = readFileSync(
new URL(`../querys/${name}.sql`, import.meta.url), new URL(`../querys/newQueries/${name}.sql`, import.meta.url),
"utf8", "utf8",
); );
return { return {
success: true, success: true,
message: `Query for: ${name}`,
query: queryFile, query: queryFile,
}; };
} catch (error) { } catch (error) {

View File

@@ -10,7 +10,8 @@
"dev:front": "cd frontend && npm run dev", "dev:front": "cd frontend && npm run dev",
"dev:db:migrate": "npx drizzle-kit push", "dev:db:migrate": "npx drizzle-kit push",
"dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts", "dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts",
"dev": "concurrently -n \"server,frontend,docs,oldServer\" -c \"#007755,#2f6da3,#DB4FE0, #1F73D1\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\" \"npm run dev:old\"", "dev": "concurrently -n \"server,oldServer\" -c \"#007755, #1F73D1\" \"npm run dev:app\" \"npm run dev:old\"",
"dev:all": "concurrently -n \"server,frontend,docs,oldServer\" -c \"#007755,#2f6da3,#DB4FE0, #1F73D1\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\" \"npm run dev:old\"",
"dev:old": "cd lstV2 && npm run dev", "dev:old": "cd lstV2 && npm run dev",
"copy:docs": "node scripts/lstDocCopy.mjs", "copy:docs": "node scripts/lstDocCopy.mjs",
"build:app": "rimraf dist && npx tsc && node scripts/lstAppMoves.mjs", "build:app": "rimraf dist && npx tsc && node scripts/lstAppMoves.mjs",

View File

@@ -93,7 +93,14 @@ scanner.connect(50000, "10.204.0.26", async () => {
}); });
scanner.on("data", async (data) => { scanner.on("data", async (data) => {
console.log("Response:", data.toString("ascii")); console.log(
"Response:",
data
.toString("ascii")
.replace(/\x00/g, "") // remove null bytes
.replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
.trim(),
);
}); });
scanner.on("close", () => { scanner.on("close", () => {

View File

@@ -5,46 +5,51 @@ import net from "net";
*/ */
const scannerID = "98@"; const scannerID = "98@";
const scannerCommand = "Alplaprodcmd112"; // to consume all the pallets const scannerCommand = "Alplaprodcmd112"; // to consume all the pallets
const lot = "AlplaPRODchg#00000016706"; // to consume to the lot make sure its showing in 2.0 to be able to consume to it const lot = "AlplaPRODchg#11601"; // to consume to the lot make sure its showing in 2.0 to be able to consume to it
const labels = [ const labels = [
"1000000000000000000000000000000005106656", "1000000000000000000000000000000004551860",
"1000000000000000000000000000000005106386", "1000000000000000000000000000000004551640",
"1000000000000000000000000000000005106446", "1000000000000000000000000000000004551840",
"1000000000000000000000000000000005106326", "1000000000000000000000000000000004551610",
"1000000000000000000000000000000005105726", "1000000000000000000000000000000004551720",
"1000000000000000000000000000000005106056", "1000000000000000000000000000000004551680",
"1000000000000000000000000000000005106256", "1000000000000000000000000000000004551740",
"1000000000000000000000000000000005105836", "1000000000000000000000000000000004551660",
"1000000000000000000000000000000005105986", "1000000000000000000000000000000004551570",
"1000000000000000000000000000000005105506", "1000000000000000000000000000000004551480",
"1000000000000000000000000000000005106136", "1000000000000000000000000000000004551510",
"1000000000000000000000000000000005105696", "1000000000000000000000000000000004551460",
"1000000000000000000000000000000005105426", "1000000000000000000000000000000004551600",
"1000000000000000000000000000000005105916", "1000000000000000000000000000000004551340",
"1000000000000000000000000000000005105216", "1000000000000000000000000000000004551580",
"1000000000000000000000000000000005105416", "1000000000000000000000000000000004551330",
"1000000000000000000000000000000005105196", "1000000000000000000000000000000004551290",
"1000000000000000000000000000000005105226", "1000000000000000000000000000000004551180",
"1000000000000000000000000000000005105816", "1000000000000000000000000000000004551260",
"1000000000000000000000000000000005110186", "1000000000000000000000000000000004551150",
"1000000000000000000000000000000005110256", "1000000000000000000000000000000004551390",
"1000000000000000000000000000000005109926", "1000000000000000000000000000000004551440",
"1000000000000000000000000000000005110096", "1000000000000000000000000000000004551360",
"1000000000000000000000000000000005110026", "1000000000000000000000000000000004551400",
"1000000000000000000000000000000005110036", "1000000000000000000000000000000004544780",
"1000000000000000000000000000000005109716", "1000000000000000000000000000000004551230",
"1000000000000000000000000000000005110006", "1000000000000000000000000000000004544770",
"1000000000000000000000000000000005109446", "1000000000000000000000000000000004551200",
"1000000000000000000000000000000005109606", "1000000000000000000000000000000004544850",
"1000000000000000000000000000000005109076", "1000000000000000000000000000000004548370",
"1000000000000000000000000000000004544840",
"1000000000000000000000000000000004548470",
"1000000000000000000000000000000004611380",
"1000000000000000000000000000000004611470",
"1000000000000000000000000000000004611440",
]; ];
const STX = "\x02"; const STX = "\x02";
const ETX = "\x03"; const ETX = "\x03";
const scanner = new net.Socket(); const scanner = new net.Socket();
scanner.connect(50000, "10.204.0.26", async () => { scanner.connect(50001, "10.80.0.26", async () => {
console.log("Connected to scanner"); console.log("Connected to scanner");
// change the scanner to the to 112 // change the scanner to the to 112
@@ -76,7 +81,14 @@ scanner.connect(50000, "10.204.0.26", async () => {
}); });
scanner.on("data", async (data) => { scanner.on("data", async (data) => {
console.log("Response:", data.toString("ascii")); console.log(
"Response:",
data
.toString("ascii")
.replace(/\x00/g, "") // remove null bytes
.replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
.trim(),
);
}); });
scanner.on("close", () => { scanner.on("close", () => {

View File

@@ -6,69 +6,131 @@ import net from "net";
const prodIP = "10.204.0.26"; const prodIP = "10.204.0.26";
const prodPort = 50000; const prodPort = 50000;
const scannerID = "98@"; const scannerID = "98@";
const scannerCommand = "AlplaPRODcmd00000042#000028547"; // top of the picksheet const scannerCommand = "AlplaPRODcmd00000042#000028643"; // top of the picksheet
const scannerCommand2 = ""; // bottom of the pick sheet const scannerCommand2 = ""; // bottom of the pick sheet
const labels = [ const labels = [
"1000000000000000000000000000000005566030", "1000000000000000000000000000000005572620",
"1000000000000000000000000000000005544896", "1000000000000000000000000000000005572630",
"1000000000000000000000000000000005544906", "1000000000000000000000000000000005572640",
"1000000000000000000000000000000005544916", "1000000000000000000000000000000005572650",
"1000000000000000000000000000000005544926", "1000000000000000000000000000000005572660",
"1000000000000000000000000000000005544936", "1000000000000000000000000000000005572670",
"1000000000000000000000000000000005544946", "1000000000000000000000000000000005572680",
"1000000000000000000000000000000005544956", "1000000000000000000000000000000005572690",
"1000000000000000000000000000000005544966", "1000000000000000000000000000000005572700",
"1000000000000000000000000000000005544976", "1000000000000000000000000000000005572710",
"1000000000000000000000000000000005544986", "1000000000000000000000000000000005572720",
"1000000000000000000000000000000005544996", "1000000000000000000000000000000005572730",
"1000000000000000000000000000000005545006", "1000000000000000000000000000000005572740",
"1000000000000000000000000000000005545016", "1000000000000000000000000000000005572750",
"1000000000000000000000000000000005545026", "1000000000000000000000000000000005572760",
"1000000000000000000000000000000005545036", "1000000000000000000000000000000005572770",
"1000000000000000000000000000000005545046", "1000000000000000000000000000000005572780",
"1000000000000000000000000000000005545056", "1000000000000000000000000000000005572790",
"1000000000000000000000000000000005545066", "1000000000000000000000000000000005572800",
"1000000000000000000000000000000005545076", "1000000000000000000000000000000005572810",
"1000000000000000000000000000000005545086", "1000000000000000000000000000000005572820",
"1000000000000000000000000000000005545096", "1000000000000000000000000000000005572830",
"1000000000000000000000000000000005545106", "1000000000000000000000000000000005572840",
"1000000000000000000000000000000005545116", "1000000000000000000000000000000005572850",
"1000000000000000000000000000000005545126", "1000000000000000000000000000000005572860",
"1000000000000000000000000000000005545136", "1000000000000000000000000000000005572870",
"1000000000000000000000000000000005545146", "1000000000000000000000000000000005572880",
"1000000000000000000000000000000005545156", "1000000000000000000000000000000005572890",
"1000000000000000000000000000000005545166", "1000000000000000000000000000000005572900",
"1000000000000000000000000000000005545176", "1000000000000000000000000000000005572910",
"1000000000000000000000000000000005545186", "1000000000000000000000000000000005573226",
"1000000000000000000000000000000005544580", "1000000000000000000000000000000005573236",
"1000000000000000000000000000000005544590", "1000000000000000000000000000000005573246",
"1000000000000000000000000000000005544600", "1000000000000000000000000000000005573256",
"1000000000000000000000000000000005544610", "1000000000000000000000000000000005573266",
"1000000000000000000000000000000005544640", "1000000000000000000000000000000005573276",
"1000000000000000000000000000000005544650", "1000000000000000000000000000000005573286",
"1000000000000000000000000000000005544660", "1000000000000000000000000000000005573296",
"1000000000000000000000000000000005544670", "1000000000000000000000000000000005573306",
"1000000000000000000000000000000005544680", "1000000000000000000000000000000005573316",
"1000000000000000000000000000000005544690", "1000000000000000000000000000000005573326",
"1000000000000000000000000000000005544700", "1000000000000000000000000000000005573336",
"1000000000000000000000000000000005544710", "1000000000000000000000000000000005573346",
"1000000000000000000000000000000005544720", "1000000000000000000000000000000005573356",
"1000000000000000000000000000000005544730", "1000000000000000000000000000000005573366",
"1000000000000000000000000000000005544740", "1000000000000000000000000000000005573376",
"1000000000000000000000000000000005544750", "1000000000000000000000000000000005573386",
"1000000000000000000000000000000005544760", "1000000000000000000000000000000005573396",
"1000000000000000000000000000000005544770", "1000000000000000000000000000000005573406",
"1000000000000000000000000000000005544780", "1000000000000000000000000000000005573416",
"1000000000000000000000000000000005544790", "1000000000000000000000000000000005573426",
"1000000000000000000000000000000005544800", "1000000000000000000000000000000005573436",
"1000000000000000000000000000000005544810", "1000000000000000000000000000000005573446",
"1000000000000000000000000000000005544820", "1000000000000000000000000000000005573456",
"1000000000000000000000000000000005544830", "1000000000000000000000000000000005573466",
"1000000000000000000000000000000005544840", "1000000000000000000000000000000005573476",
"1000000000000000000000000000000005544850", "1000000000000000000000000000000005573486",
"1000000000000000000000000000000005544860", "1000000000000000000000000000000005573496",
"1000000000000000000000000000000005544870", "1000000000000000000000000000000005573506",
"1000000000000000000000000000000005573516",
"1000000000000000000000000000000005581616",
"1000000000000000000000000000000005581626",
"1000000000000000000000000000000005581636",
"1000000000000000000000000000000005581646",
"1000000000000000000000000000000005581656",
"1000000000000000000000000000000005581666",
"1000000000000000000000000000000005581676",
"1000000000000000000000000000000005581686",
"1000000000000000000000000000000005581696",
"1000000000000000000000000000000005581706",
"1000000000000000000000000000000005581716",
"1000000000000000000000000000000005581726",
"1000000000000000000000000000000005581736",
"1000000000000000000000000000000005581746",
"1000000000000000000000000000000005581756",
"1000000000000000000000000000000005581766",
"1000000000000000000000000000000005581776",
"1000000000000000000000000000000005581786",
"1000000000000000000000000000000005581796",
"1000000000000000000000000000000005581806",
"1000000000000000000000000000000005581816",
"1000000000000000000000000000000005581826",
"1000000000000000000000000000000005581836",
"1000000000000000000000000000000005581846",
"1000000000000000000000000000000005581856",
"1000000000000000000000000000000005582760",
"1000000000000000000000000000000005581866",
"1000000000000000000000000000000005581876",
"1000000000000000000000000000000005581886",
"1000000000000000000000000000000005581896",
"1000000000000000000000000000000005581906",
"1000000000000000000000000000000005581310",
"1000000000000000000000000000000005581320",
"1000000000000000000000000000000005581330",
"1000000000000000000000000000000005581340",
"1000000000000000000000000000000005581350",
"1000000000000000000000000000000005581360",
"1000000000000000000000000000000005581370",
"1000000000000000000000000000000005581380",
"1000000000000000000000000000000005581390",
"1000000000000000000000000000000005581400",
"1000000000000000000000000000000005581410",
"1000000000000000000000000000000005581420",
"1000000000000000000000000000000005581430",
"1000000000000000000000000000000005581440",
"1000000000000000000000000000000005581450",
"1000000000000000000000000000000005581460",
"1000000000000000000000000000000005581470",
"1000000000000000000000000000000005581480",
"1000000000000000000000000000000005581490",
"1000000000000000000000000000000005581500",
"1000000000000000000000000000000005581510",
"1000000000000000000000000000000005581520",
"1000000000000000000000000000000005581530",
"1000000000000000000000000000000005581540",
"1000000000000000000000000000000005581550",
"1000000000000000000000000000000005581560",
"1000000000000000000000000000000005581570",
"1000000000000000000000000000000005581580",
"1000000000000000000000000000000005581590",
"1000000000000000000000000000000005581600",
]; ];
const STX = "\x02"; const STX = "\x02";
const ETX = "\x03"; const ETX = "\x03";
@@ -106,7 +168,14 @@ scanner.connect(prodPort, prodIP, async () => {
}); });
scanner.on("data", async (data) => { scanner.on("data", async (data) => {
console.log("Response:", data.toString("ascii")); console.log(
"Response:",
data
.toString("ascii")
.replace(/\x00/g, "") // remove null bytes
.replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
.trim(),
);
}); });
scanner.on("close", () => { scanner.on("close", () => {