feat(notification): plant to plant edi

This commit is contained in:
2026-03-10 08:18:29 -05:00
parent 2111a5fdc9
commit ea92422bb1
14 changed files with 428 additions and 69 deletions

View File

@@ -51,6 +51,7 @@ export const psiGetPlanningData = async (
};
}
// TODO: if we are not running planning, pass the old structure; if we are running the new planning, use the improved version below that makes sure we don't have negative numbers.
articles = data.data;
return {

View File

@@ -145,7 +145,7 @@ app.openapi(
return c.json({
success: true,
message: "All Current Active Querys.",
sheetVersion: 2.8,
sheetVersion: 2.8, // TODO: when this gets switched change this
data: current,
});
},

View File

@@ -5,6 +5,7 @@ import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
type Data = {
runningNr: number;
@@ -16,7 +17,32 @@ export const relatePallet = async (data: Data) => {
// console.log(data);
// create the url to post
const url = await prodEndpointCreation("/public/v1.0/Warehousing/Relocate");
// do we have warehousing turned on?
const { data: feature, error: featureError } = (await tryCatch(
query(
`SELECT [Id]
,[Feature]
,[Enabled]
,[ActivationDate]
FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
"feature switch check",
),
)) as any;
let prodUrl = "/public/v1.0/Warehousing/Relocate";
if (featureError) {
prodUrl = "/public/v1.0/Warehousing/Relocate";
}
if (feature?.data.length > 0) {
prodUrl = "/public/v1.1/Warehousing/Unit/Relocate";
}
// 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel","
// 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"
let url = await prodEndpointCreation(prodUrl);
const SSCC = await createSSCC(runningNr);
const consumeSomething = {
ScannerId: 999,
@@ -40,7 +66,7 @@ export const relatePallet = async (data: Data) => {
};
}
if (results.data.Result !== 0) {
if (results.data.Result !== 0 || results.data.data.length <= 0) {
return {
success: false,
message: results.data.Message,

View File

@@ -1,6 +1,7 @@
import axios from "axios";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
export const postAdjustment = async (data: any) => {
if (data.warehouseId === undefined) {
@@ -35,9 +36,30 @@ export const postAdjustment = async (data: any) => {
quantity: data.quantity,
};
let url = await prodEndpointCreation(
"/public/v1.0/Warehousing/AdjustSiloStockLevel",
);
// do we have warehousing turned on?
const { data: feature, error: featureError } = (await tryCatch(
query(
`SELECT [Id]
,[Feature]
,[Enabled]
,[ActivationDate]
FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
"feature switch check",
),
)) as any;
let prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
if (featureError) {
prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
}
if (feature?.data.length > 0) {
prodUrl = "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel";
}
// 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel","
// 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"
let url = await prodEndpointCreation(prodUrl);
const { data: silo, error } = await tryCatch(
axios.post(url, siloAdjustment, {

View File

@@ -0,0 +1,160 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js";
import { sendEmail } from "../sendMail.js";
/**
 * Plant-to-plant EDI notification.
 *
 * Finds BOLs created in the last `checkInterval` minutes, looks up the
 * pallets and the receiving plant's contact email for each BOL, sends one
 * email per BOL, and persists the processed BOL numbers (capped at 15) in
 * the notification's `notifiySettings.processedBol` so they are not re-sent.
 *
 * @param notifyData notification config row (name, email, checkInterval,
 *                   notifiySettings.processedBol) — shape assumed from
 *                   usage; TODO confirm against the notification registry.
 * @returns { success, message, data } summary object.
 */
export default async function platToPlantEdi(notifyData: any) {
  createLog("info", "blocking", "notify", `monitoring ${notifyData.name}`);
  // Load the stored notification row. It is not read further below;
  // kept so a missing row is still logged.
  const { data: noti, error: notiError } = await tryCatch(
    db
      .select()
      .from(notifications)
      .where(eq(notifications.name, notifyData.name)),
  );
  if (notiError) {
    createLog(
      "error",
      "edi",
      "notify",
      "Error in getting the notification data",
    );
  }
  // Default emails; can be blank as these are only appended to the
  // addresses resolved from the full flow.
  let emails = notifyData.email ?? "";
  const checkBol = sqlQuerySelector("checkBol.query");
  if (!checkBol.success) {
    createLog("error", "edi", "notify", "Error in getting the bol query data");
  }
  const pLinkedB = sqlQuerySelector("palletsLinkedToBol.query");
  if (!pLinkedB.success) {
    // BUG FIX: this log previously repeated the "bol query" message.
    createLog(
      "error",
      "edi",
      "notify",
      "Error in getting the pallet query data",
    );
  }
  let ignoreBols: string[] = notifyData?.notifiySettings?.processedBol ?? [];
  // BUG FIX: `join` always returns a string, so the old `?? 500` fallback
  // could never fire. Fall back to "0" so an empty list does not produce
  // invalid SQL (`not in ()`).
  const joinBols = ignoreBols.join(",") || "0";
  const { data: b, error: bError } = (await tryCatch(
    query(
      checkBol?.query
        ?.replace("[timeCheck]", notifyData.checkInterval ?? "30")
        .replace("[ignoreBols]", joinBols) ?? "",
      "Check bol",
    ),
  )) as any;
  if (bError) {
    return {
      success: false,
      message: "Error getting newly created bols",
      data: bError,
    };
  }
  if (b.data.length === 0) {
    return {
      success: true,
      message: "No new bols have been created",
      data: [],
    };
  }
  // Group the ladeplanung ids per BOL (one BOL can span several rows) and
  // keep each BOL's own addressId alongside.
  // BUG FIX: the old code read b.data[0].addressId for *every* BOL.
  const planedByBol = new Map<string, { ids: string[]; addressId: string }>();
  for (const row of b.data) {
    const entry = planedByBol.get(row.bol);
    if (entry) {
      entry.ids.push(String(row.idladeplanung));
    } else {
      planedByBol.set(row.bol, {
        ids: [String(row.idladeplanung)],
        addressId: String(row.addressId ?? ""),
      });
    }
  }
  for (const [bolNumber, { ids, addressId }] of planedByBol.entries()) {
    // Pallets that are waiting inside this BOL's ladeplanung.
    const joinedIdLadeplanung = ids.join(",");
    const { data: pallets, error: pError } = await tryCatch(
      query(
        pLinkedB?.query?.replace(
          "[palLinkedToBol]",
          joinedIdLadeplanung || "0",
        ) ?? "",
        "Get Pallets linked in the bol",
      ),
    );
    if (pError) {
      // BUG FIX: previously unhandled; skip just this BOL instead of crashing.
      createLog(
        "error",
        "edi",
        "notify",
        `Error getting pallets for BOL ${bolNumber}`,
      );
      continue;
    }
    // No address on the BOL means nothing to email.
    // BUG FIX: the old bare `return;` aborted all remaining BOLs and
    // returned undefined; skip only this BOL.
    if (addressId === "") continue;
    ignoreBols.push(bolNumber);
    // Cap the remembered BOLs at the 15 most recent.
    if (ignoreBols.length > 15) {
      ignoreBols.splice(0, ignoreBols.length - 15);
    }
    // Resolve the receiving plant's contact email.
    // (Renamed from `checkBol`, which shadowed the outer query above.)
    const addressQuery = sqlQuerySelector("addressInfo.query");
    const { data: address, error: aError } = (await tryCatch(
      query(
        addressQuery?.query?.replace("[customerAddress]", addressId) ?? "",
        "Get customer address for the bol",
      ),
    )) as any;
    if (aError || !address?.data?.length || !address.data[0].email) {
      // BUG FIX: previously a failed/empty lookup threw on address.data[0].
      createLog(
        "error",
        "edi",
        "notify",
        `No contact email found for BOL ${bolNumber}`,
      );
      continue;
    }
    // Build and send the email for this BOL.
    const emailSetup = {
      email: address.data[0].email,
      subject: `New EDI transfer Created for BOL: ${bolNumber}`,
      template: "plantToPlantEdi",
      context: {
        items: pallets?.data ?? [],
        bol: bolNumber,
        //secondarySetting: notifyData.notifiySettings,
      },
    };
    await sendEmail(emailSetup);
    // Persist the processed BOLs so they are not re-sent next run.
    await db
      .update(notifications)
      .set({
        lastRan: sql`NOW()`,
        notifiySettings: { processedBol: ignoreBols },
      })
      .where(eq(notifications.name, notifyData.name));
  }
  return {
    success: true,
    message: "All bols have been processed",
    data: [ignoreBols],
  };
}

View File

@@ -45,57 +45,57 @@ export const sendEmail = async (data: any): Promise<any> => {
};
}
// get the plantToken
const server = settingData.filter((n) => n.name === "server");
//const server = settingData.filter((n) => n.name === "server");
if (
server[0].value === "localhostx" &&
process.env.EMAIL_USER &&
process.env.EMAIL_PASSWORD
) {
transporter = nodemailer.createTransport({
service: "gmail",
host: "smtp.gmail.com",
port: 465,
auth: {
user: process.env.EMAIL_USER,
pass: process.env.EMAIL_PASSWORD,
},
//debug: true,
});
// if (
// server[0].value === "localhostx" &&
// process.env.EMAIL_USER &&
// process.env.EMAIL_PASSWORD
// ) {
// transporter = nodemailer.createTransport({
// service: "gmail",
// host: "smtp.gmail.com",
// port: 465,
// auth: {
// user: process.env.EMAIL_USER,
// pass: process.env.EMAIL_PASSWORD,
// },
// //debug: true,
// });
// update the from email
fromEmail = process.env.EMAIL_USER;
} else {
// convert to the correct plant token.
//const plantToken = settingData.filter((s) => s.name === "plantToken");
// // update the from email
// fromEmail = process.env.EMAIL_USER;
//} else {
// convert to the correct plant token.
//const plantToken = settingData.filter((s) => s.name === "plantToken");
// let host = `${plantToken[0].value}-smtp.alpla.net`;
// let host = `${plantToken[0].value}-smtp.alpla.net`;
// const testServers = ["test1", "test2", "test3"];
// const testServers = ["test1", "test2", "test3"];
// if (testServers.includes(plantToken[0].value)) {
// host = "USMCD1-smtp.alpla.net";
// }
// if (testServers.includes(plantToken[0].value)) {
// host = "USMCD1-smtp.alpla.net";
// }
// if (plantToken[0].value === "usiow2") {
// host = "USIOW1-smtp.alpla.net";
// }
// if (plantToken[0].value === "usiow2") {
// host = "USIOW1-smtp.alpla.net";
// }
transporter = nodemailer.createTransport({
host: "smtp.azurecomm.net",
port: 587,
//rejectUnauthorized: false,
tls: {
minVersion: "TLSv1.2",
},
auth: {
user: "donotreply@mail.alpla.com",
pass: process.env.SMTP_PASSWORD,
},
debug: true,
});
fromEmail = `DoNotReply@mail.alpla.com`;
}
transporter = nodemailer.createTransport({
host: "smtp.azurecomm.net",
port: 587,
//rejectUnauthorized: false,
tls: {
minVersion: "TLSv1.2",
},
auth: {
user: "donotreply@mail.alpla.com",
pass: process.env.SMTP_PASSWORD,
},
debug: true,
});
fromEmail = `DoNotReply@mail.alpla.com`;
//}
// creating the handlbar options
const viewPath = path.resolve(

View File

@@ -164,6 +164,16 @@ export const note: any = [
errorCount: 10, // change this to something else or leave blank to use the av type
},
},
{
name: "platToPlantEdi",
description:
"This is the plant to plant edi that will send an edi to the email once it ships, the emails will be for the receiving plants",
checkInterval: 15,
timeType: "min",
emails: "",
active: false,
notifiySettings: { processedBol: [500] },
},
];
export const notificationCreate = async () => {

View File

@@ -0,0 +1,46 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
{{> styles}}
</head>
<body>
<p>All,</p>
<p>BOL: {{bol}} was created with the below pallets.</p>
<p>Please head to stock and import the pallets via the normal incoming goods process.</p>
<p>When encountering a discrepancy in pallets/cages received, please correct this after the pallets have been imported.</p>
<p>Because these are plant to plant transfers, the only way to correct a discrepancy is to bring the pallets in and then undo the incoming goods process.</p>
{{!-- <br> is a void element; the previous <br></br> form was invalid HTML --}}
<br>
<table>
<thead>
<tr>
<th>Running Number</th>
<th>AV</th>
<th>Description</th>
<th>Lot number</th>
<th>Quantity</th>
{{!-- <th>Downtime finish</th> --}}
</tr>
</thead>
<tbody>
{{#each items}}
<tr>
<td>{{runningNr}}</td>
<td>{{article}}</td>
<td>{{alias}}</td>
<td>{{lotNumber}}</td>
<td>{{qty}}</td>
{{!-- <td>{{dtEnd}}</td> --}}
</tr>
{{/each}}
</tbody>
</table>
<div>
<p>Thank you,</p>
<p>LST Team</p>
</div>
</body>
</html>

View File

@@ -167,23 +167,23 @@ export async function query(queryToRun: string, name: string) {
const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer",
) as any;
const serverUp = await checkHostnamePort(
`${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`,
);
// const serverUp = await checkHostnamePort(
// `${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`,
// );
if (!serverUp) {
createLog(
"error",
"lst",
"server",
`The sql ${dbServer[0].value} is not reachable`,
);
return {
success: false,
message: `The sql ${dbServer[0].value} is not reachable`,
data: [],
};
}
// if (!serverUp) {
// createLog(
// "error",
// "lst",
// "server",
// `Failed to run query due to ${dbServer[0].value} not being reachable.`,
// );
// return {
// success: false,
// message: `Failed to run query due to ${dbServer[0].value} not being reachable.`,
// data: [],
// };
// }
if (!connected) {
createLog(

View File

@@ -0,0 +1,14 @@
use [test3_AlplaPROD2.0_Read]
/**
Contact details for a customer address, used by the plant-to-plant EDI
notification to find the receiving plant's email.
Placeholder replaced textually by the caller:
  [customerAddress] : humanReadableId of the address (falls back to 0)
*/
select
humanreadableId as addressId
,ContactEmail as email
,ContactName
,ContactPhoneNumber
,Name
,Street
,City
,ZipCode
--,*
-- FIX: bare "(nolock)" table hints without WITH are deprecated T-SQL syntax.
from [masterData].[Address] with (nolock)
where humanreadableid = [customerAddress]

View File

@@ -0,0 +1,43 @@
use AlplaPROD_test1
/**
check if we have any new alpla bols that were created

Placeholders replaced textually by the caller (plantToPlantEdi notification):
  [timeCheck]  : look-back window in minutes (default 30)
  [ignoreBols] : comma-separated list of already-processed values

NOTE(review): the caller stores journalNummer (aliased below as "bol")
values in processedBol, but the exclusion filter below compares against
e.idjournal — confirm these are the same values, otherwise the ignore
list never matches and BOLs may be re-sent.
*/
SELECT
x.idladeplanung
,e.idjournal
,e.journalNummer as bol
,e.idjournalstatus
,e.ladeDatum as loadDate
,e.bemerkung
,e.ereporting_idwerk
,e.journalDatum
,a.idadressen as addressId
,a.bezeichnung as addressDescription
,a.strasse as streetAddress
,a.ort as cityState
,a.plz as zipcode
,idauftrag as releaseNumber
--,*
FROM [dbo].[T_EAIJournal] as e with (nolock)
-- pull in the address so we only pull in florence data
left join
[dbo].[T_EAIJournalAdresse] as a with (nolock) on
a.[IdJournalAdresse] = [IdJournalKundenAdresse]
-- get the table to link the pallets to the bol
left join
[dbo].[T_EAIJournalPosition] as x with (nolock) on
x.idjournal = e.idjournal
-- NOTE(review): status 62 presumably means the journal has shipped — confirm.
where idjournalStatus = 62
--and idadressen = 270
--and a.bezeichnung like '%Alpla%' -- we only want to monitor for addresses that are linked to alpla.
and JournalDatum > DATEADD(MINUTE, -[timeCheck], GETDATE())
and e.idjournal not in ([ignoreBols])
and idauftrag > 1 -- this will ignore all incoming goodsv as we are really only looking for outbound deliveries
order by JournalDatum desc

View File

@@ -0,0 +1,36 @@
use AlplaPROD_test1
/**
Pallets belonging to a set of ladeplanung ids (one BOL).
Placeholder replaced textually by the caller (plantToPlantEdi notification):
  [palLinkedToBol] : comma-separated list of IdLadePlanung values
ROW_NUMBER de-duplicates per running number, keeping only the most
recently updated row (RowNum = 1).
*/
select * from (SELECT
p.[IdLadePlanung]
,p.[Beleg] as lotNumber
,p.[LfdNrJeArtikelKunde] as runningNr
,p.[Barcode]
,p.[ProduktionsDatum] as productionDate
-- NOTE(review): Add_User aliased as scanDate looks like a copy/paste slip —
-- Add_Date is the timestamp column; confirm which one downstream needs.
,p.[Add_User] as scanDate
,p.[Add_Date]
,p.[Upd_User]
,p.[Upd_Date]
,p.[IdJournalWarenPosition]
,p.[LieferMenge] as qty
-- ,av.IdArtikelvarianten as article
-- ,av.Bezeichnung as alias
,av.articlehumanreadableid as article
,av.ArticleDescription as alias
--,[SSCC_ReserveZiffer]
,ROW_NUMBER() OVER (PARTITION BY p.[LfdNrJeArtikelKunde] ORDER BY p.upd_date DESC) AS RowNum
--,*
-- FIX: bare "(nolock)" table hints without WITH are deprecated T-SQL syntax.
FROM [dbo].[T_EAIJournalLieferPosition] as p with (nolock)
-- left join
-- dbo.T_ProdPlanung as l on
-- l.IdProdPlanung = p.Beleg
left join
[test3_AlplaPROD2.0_Read].labelling.InternalLabel as av on
av.RunningNumber = p.[LfdNrJeArtikelKunde]
) as a
where idladeplanung in ([palLinkedToBol]) and RowNum = 1
order by runningNr