diff --git a/server/services/notifications/controller/notifications/downTimeCheck.ts b/server/services/notifications/controller/notifications/downTimeCheck.ts
index fa0b3cb..fc5ddb5 100644
--- a/server/services/notifications/controller/notifications/downTimeCheck.ts
+++ b/server/services/notifications/controller/notifications/downTimeCheck.ts
@@ -92,7 +92,7 @@ export default async function reprintLabelMonitor(notifyData: any) {
) {
//send the email :D
const emailSetup = {
- emailTo: notifyData.emails,
+ email: notifyData.emails,
subject: `Alert! Downtime recorded greater than ${
notifyData.notifiySettings?.duration
}min ${
diff --git a/server/services/notifications/controller/notifications/productionCheck.ts b/server/services/notifications/controller/notifications/productionCheck.ts
index 9f42374..be81c0e 100644
--- a/server/services/notifications/controller/notifications/productionCheck.ts
+++ b/server/services/notifications/controller/notifications/productionCheck.ts
@@ -65,7 +65,10 @@ export default async function reprintLabelMonitor(notifyData: any) {
`;
//update the time check
- notifyQuery = notifyQuery.replaceAll("[timeCheck]", notifyData.checkTime);
+ notifyQuery = notifyQuery.replaceAll(
+ "[timeCheck]",
+ notifyData.checkInterval
+ );
notifyQuery = notifyQuery.replaceAll(
"[locations]",
notifyData.notifiySettings.locations
@@ -82,13 +85,13 @@ export default async function reprintLabelMonitor(notifyData: any) {
// update the count with the result
const emailSetup = {
- emailTo: notifyData.emails,
+ email: notifyData.emails,
subject: `Alert! Pallets in production greater than ${notifyData.checkTime} ${notifyData.timeType}`,
template: "productionCheck",
context: {
items: prod,
count: prod.length,
- checkTime: notifyData.checkTime,
+ checkTime: notifyData.checkInterval,
timeCheck: notifyData.timeType,
},
};
diff --git a/server/services/notifications/controller/notifications/qualityBlocking.ts b/server/services/notifications/controller/notifications/qualityBlocking.ts
index 68217ff..497fa95 100644
--- a/server/services/notifications/controller/notifications/qualityBlocking.ts
+++ b/server/services/notifications/controller/notifications/qualityBlocking.ts
@@ -1,5 +1,3 @@
-// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
-
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
@@ -7,152 +5,121 @@ import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sendEmail } from "../sendMail.js";
-
-export interface Blocking {
- HumanReadableId?: number;
- subject?: string;
-}
+import { blockQuery } from "../../../sqlServer/querys/notifications/blocking.js";
export default async function qualityBlockingMonitor(notifyData: any) {
- createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
+    createLog("info", "blocking", "notify", `monitoring ${notifyData.name}`);
if (notifyData.emails === "") {
createLog(
"error",
- "notify",
+ "blocking",
"notify",
`There are no emails set for ${notifyData.name}`
);
-
- return;
+ return {
+ success: false,
+ message: `There are no emails set for ${notifyData.name}`,
+ };
}
- let blockQuery = `
- SELECT
- 'Alert! new blocking order: #' + cast(HumanReadableId as varchar) + ' - ' + ArticleVariantDescription as subject,
- cast([HumanReadableId] as varchar) as blockingNumber,
- [ArticleVariantDescription] as article,
- cast([CustomerHumanReadableId] as varchar) + ' - ' + [CustomerDescription] as customer,
- convert(varchar(10), [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate], 101) + ' - ' + convert(varchar(5), [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate], 108) as blockingDate,
- cast(ArticleVariantHumanReadableId as varchar) + ' - ' + ArticleVariantDescription as av,
- case when [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark = '' or [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark is NULL then 'Please reach out to quality for the reason this was placed on hold as a remark was not entered during the blocking processs' else [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark end as remark,
- cast(FORMAT(TotalAmountOfPieces, '###,###') as varchar) + ' / ' + cast(LoadingUnit as varchar) as peicesAndLoadingUnits,
- [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].ProductionLotHumanReadableId as lotNumber,
- cast(IdGlobalBlockingDefectsGroup as varchar) + ' - ' + BD.Description as mainDefectGroup,
- cast(IdGlobalBlockingDefect as varchar) + ' - ' + MD.Description as mainDefect,
- sent=0,
- lot.MachineLocation as line,
- HumanReadableId
- FROM [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder] (nolock)
-
- /*** Join 1.0 table to get correct id info to link ***/
- join
- [AlplaPROD_test1].[dbo].[T_BlockingOrders] (nolock) AS BO
- on [HumanReadableId] = BO.[IdBlockingOrder]
-
-
- /*** Get the main defect info ***/
- Inner join
- [AlplaPROD_test1].[dbo].[T_BlockingDefectsGroups] (nolock) as BD
- ON BO.IdMainDefectGroup = BD.IdBlockingDefectsGroup
-
- INNER join
- [AlplaPROD_test1].[dbo].[T_BlockingDefects] as MD
- ON BO.IdMainDefect = MD.IdBlockingDefect
- /*** get lot info ***/
-
- left join
- (SELECT [MachineLocation]
- ,[MachineDescription]
- ,[ProductionLotHumanReadableId]
- FROM [test1_AlplaPROD2.0_Reporting].[reporting_productionControlling].[ProducedLot]) as lot
- on [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].ProductionLotHumanReadableId = lot.ProductionLotHumanReadableId
-
- where [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate] between getdate() - 1 and getdate() + 1
- and [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].BlockingTrigger = 1
- and HumanReadableId NOT IN ([sentBlockingOrders])
- `;
-
- //add the blocking orders in.
- blockQuery = blockQuery.replaceAll(
- "[sentBlockingOrders]",
- notifyData.sentBlocking[0].sentBlockingOrders
+ const { data: noti, error: notiError } = await tryCatch(
+ db
+ .select()
+ .from(notifications)
+ .where(eq(notifications.name, notifyData.name))
);
- let blocking: any;
- try {
- blocking = await query(blockQuery, "Quality Blocking");
- //console.log(labels.length);
- // const now = Date.now()
- //console.log(blocking);
- // console.log(blocking[0].blockingNumber > data.prodID);
- if (
- blocking.length > 0 &&
- blocking[0].HumanReadableId > notifyData.notifiySettings.prodID
- ) {
- //send the email :D
- const emailSetup = {
- emailTo: notifyData.emails,
- subject:
- blocking.length > 0
- ? `Alert! New blocking orders.`
- : blocking[0].subject,
- template: "qualityBlocking",
- context: {
- items: blocking,
- },
- };
-
- const sentEmail = await sendEmail(emailSetup);
-
- if (!sentEmail.success) {
- createLog(
- "error",
- "nofity",
- "notify",
- "Failed to send email, will try again on next interval"
- );
- return;
- }
-
- // add the new blocking order to this
- const newBlockingOrders = blocking.map(
- (b: any) => b.HumanReadableId
- );
-
- //console.log(newBlockingOrders);
- //console.log(sentBlocking[0].sentBlockingOrders);
- // Ensure no duplicates
- const uniqueOrders = Array.from(
- new Set([
- ...notifyData.sentBlocking[0].sentBlockingOrders,
- ...newBlockingOrders,
- ])
- );
-
- // Update sentBlockingOrders
- notifyData.sentBlocking[0].sentBlockingOrders = uniqueOrders;
-
- //console.log(notifUpdate);
-
- const { data, error } = await tryCatch(
- db
- .update(notifications)
- .set({
- lastRan: sql`NOW()`,
- notifiySettings: {
- ...notifyData.notifiySettings,
- prodID: blocking[0].HumanReadableId,
- sentBlockingOrders: uniqueOrders,
- },
- })
- .where(eq(notifications.name, notifyData.name))
- );
+ const notiData: any = noti;
+ const blockingOrders = notiData[0]?.notifiySettings.sentBlockingOrders.map(
+ (l: any) => {
+ return l.blockingOrder;
}
- } catch (err) {
- createLog(
- "error",
- "notify",
- "notify",
- `Error from running the blocking query: ${err}`
- );
+ );
+
+    // Guard the empty case: interpolating [] produced "NOT IN ()", which is
+    // invalid T-SQL; a sentinel id keeps the clause syntactically valid.
+    const sentList = blockingOrders?.length ? blockingOrders.join(",") : "-1";
+    let blockingQuery = blockQuery.replaceAll("[sentBlockingOrders]", sentList);
+
+ const { data: blocking, error: blockingError } = await tryCatch(
+ query(blockingQuery, "Quality Blocking")
+ );
+
+ if (blockingError) {
+ return {
+ success: false,
+ message: "Error getting blocking orders",
+ data: blockingError,
+ };
}
+
+ if (blocking.length > 0) {
+        // Inside this branch blocking.length > 0 always holds, so the old
+        // ternary's else-arm (blocking[0].subject) was dead code — and would
+        // have thrown on an empty result set had it ever been reachable.
+        const emailSetup = {
+            email: notifyData.emails,
+            subject: `Alert! New blocking orders.`,
+            template: "qualityBlocking",
+            context: {
+                items: blocking,
+            },
+        };
+
+ const { data: sentEmail, error: sendEmailError } = await tryCatch(
+ sendEmail(emailSetup)
+ );
+ if (sendEmailError) {
+ createLog(
+ "error",
+ "blocking",
+ "notify",
+ "Failed to send email, will try again on next interval"
+ );
+ return {
+ success: false,
+ message:
+ "Failed to send email, will try again on next interval",
+ };
+ }
+
+ const newBlockingOrders = blocking.map((b: any) => {
+ return {
+ blockingOrder: b.HumanReadableId,
+ timeStamp: new Date(Date.now()),
+ };
+ });
+        // new Set([...]) cannot dedupe object literals (reference equality),
+        // so dedupe explicitly by the blockingOrder id instead.
+        const prevOrders = notifyData.notifiySettings.sentBlockingOrders;
+        const sentIds = new Set(prevOrders.map((o: any) => o.blockingOrder));
+        const uniqueOrders = prevOrders.concat(
+            newBlockingOrders.filter((o: any) => !sentIds.has(o.blockingOrder))
+        );
+
+ const { data, error } = await tryCatch(
+ db
+ .update(notifications)
+ .set({
+ lastRan: sql`NOW()`,
+ notifiySettings: {
+ ...notifyData.notifiySettings,
+ prodID: blocking[0].HumanReadableId,
+ sentBlockingOrders: uniqueOrders,
+ },
+ })
+ .where(eq(notifications.name, notifyData.name))
+ );
+ if (error) {
+ return {
+ success: false,
+ message: "Error updating the blocking orders",
+ data: error,
+ };
+ }
+ }
+
+ return {
+ success: true,
+ message: "Blocking query ran successfully",
+ blocking,
+ };
}
diff --git a/server/services/notifications/controller/notifications/reprintLabels.ts b/server/services/notifications/controller/notifications/reprintLabels.ts
index 648798d..7834cfb 100644
--- a/server/services/notifications/controller/notifications/reprintLabels.ts
+++ b/server/services/notifications/controller/notifications/reprintLabels.ts
@@ -13,13 +13,13 @@ const notification = async (notifyData: any) => {
/**
* Pass the entire notification over
*/
- createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
+ createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`);
// validate if there are any emails.
if (notifyData.emails === "") {
createLog(
"error",
- "notify",
+ "reprinting",
"notify",
`There are no emails set for ${notifyData.name}`
);
@@ -31,13 +31,14 @@ const notification = async (notifyData: any) => {
// set the time of getting the label
if (notifyData.timeType === "sec") {
- timeCheck = `DATEADD(SECOND, -${notifyData.checkTime}, getdate()) `;
+ timeCheck = `DATEADD(SECOND, -${notifyData.checkInterval}, getdate()) `;
} else if (notifyData.timeType === "min") {
- timeCheck = `DATEADD(MINUTE, -${notifyData.checkTime}, getdate()) `;
+ timeCheck = `DATEADD(MINUTE, -${notifyData.checkInterval}, getdate()) `;
}
let reprintQuery = `
SELECT
+ IdEtikettenHistorie,
IdArtikelvarianten as av,
ArtikelVariantenBez as alias,
LfdNr as runningNumber,
@@ -55,6 +56,7 @@ const notification = async (notifyData: any) => {
`;
//update the time check
+
reprintQuery = reprintQuery.replaceAll(
"DATEADD(SECOND, -30, getdate()) ",
timeCheck
@@ -66,10 +68,20 @@ const notification = async (notifyData: any) => {
query(reprintQuery, "Label Reprints")
);
+ if (labelError) {
+ createLog(
+ "error",
+ "reprinting",
+ "notify",
+ `Failed to get the labels: ${labelError}`
+ );
+ return;
+ }
+
if (labels.length > 0) {
//send the email :D
const emailSetup = {
- emailTo: notifyData.emails,
+ email: notifyData.emails,
subject: "Alert! Label Reprinted",
template: "reprintLabels",
context: {
@@ -82,7 +94,7 @@ const notification = async (notifyData: any) => {
if (!sentEmail.success) {
createLog(
"error",
- "notify",
+ "reprinting",
"notify",
"Failed to send email, will try again on next interval"
);
@@ -96,7 +108,6 @@ const notification = async (notifyData: any) => {
// };
// update the last time ran
- const updateSettings = notifyData.notifiySettings;
const { data, error } = await tryCatch(
db
@@ -104,7 +115,7 @@ const notification = async (notifyData: any) => {
.set({
lastRan: sql`NOW()`,
notifiySettings: {
- ...updateSettings,
+ ...notifyData.notifiySettings,
prodID: labels[0].IdEtikettenHistorie,
},
})
diff --git a/server/services/notifications/controller/notifications/stagingCheck.ts b/server/services/notifications/controller/notifications/stagingCheck.ts
index 24e0678..1dad56a 100644
--- a/server/services/notifications/controller/notifications/stagingCheck.ts
+++ b/server/services/notifications/controller/notifications/stagingCheck.ts
@@ -64,7 +64,7 @@ export default async function reprintLabelMonitor(notifyData: any) {
//update the time check
noteQuery = noteQuery
- .replaceAll("[timeCheck]", notifyData.checkTime)
+ .replaceAll("[timeCheck]", notifyData.checkInterval)
.replaceAll("[locations]", notifyData.notifiySettings.locations);
let stage: PPOO[];
@@ -78,13 +78,13 @@ export default async function reprintLabelMonitor(notifyData: any) {
// update the count with the result
const emailSetup = {
- emailTo: notifyData.emails,
- subject: `Alert! Pallets in staging greater than ${notifyData.checkTime} ${notifyData.timeType}`,
+ email: notifyData.emails,
+ subject: `Alert! Pallets in staging greater than ${notifyData.checkInterval} ${notifyData.timeType}`,
template: "stagingCheck",
context: {
items: stage,
count: stage.length,
- checkTime: notifyData.checkTime,
+ checkTime: notifyData.checkInterval,
timeCheck: notifyData.timeType,
},
};
diff --git a/server/services/notifications/controller/notifications/tiFullFlow/dateCorrection.ts b/server/services/notifications/controller/notifications/tiFullFlow/dateCorrection.ts
new file mode 100644
index 0000000..d9b324a
--- /dev/null
+++ b/server/services/notifications/controller/notifications/tiFullFlow/dateCorrection.ts
@@ -0,0 +1,20 @@
+export const dateCorrection = async (newDate: Date) => {
+    /**
+     * Formats a date into the MM/DD/YYYY HH:mm:ss (24-hour, UTC) string
+     * layout that TI is expecting.
+     */
+    const formatter = new Intl.DateTimeFormat("en-US", {
+        timeZone: "UTC",
+        year: "numeric",
+        month: "2-digit",
+        day: "2-digit",
+        hour: "2-digit",
+        minute: "2-digit",
+        second: "2-digit",
+        hourCycle: "h23", // 24-hour clock, as TI requires
+    });
+
+    // toLocaleString/format inserts a comma between the date and time
+    // portions; strip it to match TI's expected layout.
+    return formatter.format(new Date(newDate)).replace(",", "");
+};
diff --git a/server/services/notifications/controller/notifications/tiFullFlow/headerUpdate.ts b/server/services/notifications/controller/notifications/tiFullFlow/headerUpdate.ts
new file mode 100644
index 0000000..cb4b315
--- /dev/null
+++ b/server/services/notifications/controller/notifications/tiFullFlow/headerUpdate.ts
@@ -0,0 +1,23 @@
+const requestUser = process.env.REQUESTUSER || "";
+export const headerUpdate = async (data: any, plantToken: any) => {
+ // update the header
+ let webHeader = `
+ [requestID]
+
+
+
+ [requestID].XML
+ SOTransportLoader
+ [requestUser]
+
+ `;
+
+ webHeader = webHeader
+ .replaceAll(
+ "[requestID]",
+ `${data[0].releaseNumber}-${plantToken[0].value}`
+ )
+ .replaceAll("[requestUser]", requestUser);
+
+ return webHeader;
+};
diff --git a/server/services/notifications/controller/notifications/tiFullFlow/loadItems.ts b/server/services/notifications/controller/notifications/tiFullFlow/loadItems.ts
new file mode 100644
index 0000000..d69517d
--- /dev/null
+++ b/server/services/notifications/controller/notifications/tiFullFlow/loadItems.ts
@@ -0,0 +1,54 @@
+import { freightClass } from "../../../../../globalUtils/freightClass.js";
+
+export const loadItems = async (data: any) => {
+ let itemGroups = "";
+
+ for (let i = 0; i < data.length; i++) {
+ let newItem = `
+
+
+
+
+ ${(
+ data[i].pkgLengh / 25.4
+ ).toFixed(2)}
+ ${(
+ data[i].pkgWidth / 25.4
+ ).toFixed(2)}
+ ${Math.round(
+ data[i].pkgHeight / 25.4
+ ).toFixed(2)}
+
+ ${`av ${data[i].article} ${data[i].articleAlias}`}
+
+ ${freightClass(
+ data[i].pkgWeight,
+ data[i].pkgLengh,
+ data[i].pkgWidth,
+ data[i].pkgHeight
+ )}
+
+
+
+ false
+
+
+ ${
+ data[i].pkgWeight * data[i].Pallets
+ }
+
+
+ ${
+ data[i].Pallets
+ }
+
+
+ `;
+
+ itemGroups += newItem;
+ }
+
+ return itemGroups;
+};
diff --git a/server/services/notifications/controller/notifications/tiFullFlow/postToTI.ts b/server/services/notifications/controller/notifications/tiFullFlow/postToTI.ts
new file mode 100644
index 0000000..9db6160
--- /dev/null
+++ b/server/services/notifications/controller/notifications/tiFullFlow/postToTI.ts
@@ -0,0 +1,40 @@
+import axios from "axios";
+import querystring from "querystring";
+import { createLog } from "../../../../logger/logger.js";
+//tiCreds
+const userid = process.env.USERID || "";
+const password = process.env.PASSWORD || "";
+
+export const postToTi = async (data: string) => {
+    // TI expects an urlencoded form: credentials plus the raw XML payload.
+    const formBody = querystring.stringify({
+        userid,
+        password,
+        request: data,
+    });
+    try {
+        // Await the request. The previous fire-and-forget .then/.catch chain
+        // was neither returned nor awaited, so postToTi resolved undefined
+        // before the POST even finished and the tryCatch() at the call site
+        // could never observe success or failure.
+        await axios.post(
+            "https://t-insightws.mercurygate.net/MercuryGate/common/remoteService.jsp",
+            formBody,
+            {
+                headers: {
+                    "Content-Type": "application/x-www-form-urlencoded",
+                },
+            }
+        );
+        createLog("info", "ti", "notify", "Data was sent over to TI");
+        return {
+            success: true,
+            message: "Data was sent over to TI",
+        };
+    } catch (error) {
+        createLog("error", "ti", "notify", error);
+        // Rethrow so tryCatch() at the call site reports the failure,
+        // instead of the order being marked as sent regardless.
+        throw error;
+    }
+};
diff --git a/server/services/notifications/controller/notifications/tiFullFlow/scacCodeCheck.ts b/server/services/notifications/controller/notifications/tiFullFlow/scacCodeCheck.ts
new file mode 100644
index 0000000..7c088e3
--- /dev/null
+++ b/server/services/notifications/controller/notifications/tiFullFlow/scacCodeCheck.ts
@@ -0,0 +1,33 @@
+import { createLog } from "../../../../logger/logger.js";
+
+export const scacCheck = async (data: any) => {
+ createLog(
+ "info",
+ "ti",
+ "notify",
+ `Checking if ${data[0].addressAlias} has scac: ${
+ data[0].remark.split(",")[0] ? "there was one" : "no scac"
+ }`
+ );
+
+ const priceSheet = `
+
+
+
+ ${
+ data[0].remark.split(",")[0]
+ ? `${data[0].remark
+ .split(",")[0]
+ .split(":")[1]
+ .toUpperCase()}`
+ : ``
+ }
+
+
+
+ `;
+
+ return priceSheet;
+};
diff --git a/server/services/notifications/controller/notifications/tiFullFlow/tiImport.ts b/server/services/notifications/controller/notifications/tiFullFlow/tiImport.ts
new file mode 100644
index 0000000..84aedc5
--- /dev/null
+++ b/server/services/notifications/controller/notifications/tiFullFlow/tiImport.ts
@@ -0,0 +1,301 @@
+import { eq, sql } from "drizzle-orm";
+import { db } from "../../../../../../database/dbclient.js";
+import { serverData } from "../../../../../../database/schema/serverData.js";
+import { settings } from "../../../../../../database/schema/settings.js";
+import { tryCatch } from "../../../../../globalUtils/tryCatch.js";
+import { notifications } from "../../../../../../database/schema/notifications.js";
+import { getHeaders } from "../../../../sqlServer/querys/notifications/ti/getHeaders.js";
+import { query } from "../../../../sqlServer/prodSqlServer.js";
+import { createLog } from "../../../../logger/logger.js";
+import { getOrderToSend } from "../../../../sqlServer/querys/notifications/ti/getOrderToSend.js";
+import { xmlPayloadTI } from "./tiXmlPayload.js";
+import { headerUpdate } from "./headerUpdate.js";
+import { loadItems } from "./loadItems.js";
+import { dateCorrection } from "./dateCorrection.js";
+import { scacCheck } from "./scacCodeCheck.js";
+import { postToTi } from "./postToTI.js";
+
+export const tiImport = async () => {
+ // get the plant token
+ let payload = xmlPayloadTI;
+ const { data: plantData, error: plantError } = await tryCatch(
+ db.select().from(settings)
+ );
+
+ if (plantError)
+ return {
+ success: false,
+ message: "Error Getting Plant Data.",
+ data: plantError,
+ };
+ const plantToken = plantData?.filter((n) => n.name === "plantToken");
+
+ const { data: plantInfo, error: plantEr } = await tryCatch(
+ db
+ .select()
+ .from(serverData)
+ .where(eq(serverData.plantToken, plantToken[0].value))
+ );
+
+ if (plantEr)
+ return {
+ success: false,
+ message: "Error Getting Plant Data.",
+ data: plantEr,
+ };
+
+ // parsing posting window
+ const plantI = plantInfo!;
+
+ // order notifications
+ const { data: notificationSet, error: notificationSettingsErr } =
+ await tryCatch(
+ db
+ .select()
+ .from(notifications)
+ .where(eq(notifications.name, "tiIntergration"))
+ );
+ if (notificationSettingsErr)
+ return {
+ success: false,
+ message: "Notification missing.",
+ data: notificationSettingsErr,
+ };
+
+ const notiSet: any = notificationSet;
+ const customerAccountNum = plantI[0].customerTiAcc as string; // tiIntergration
+ // get current releaes not in the already sent oders
+
+ const releaseString = notiSet[0].notifiySettings.releases
+
+ .map((num: any) => `'${num.releaseNumber}'`)
+ .join(", ");
+
+ let orders = getHeaders
+ .replaceAll("[from]", notiSet[0]?.notifiySettings.start)
+ .replaceAll("[to]", notiSet[0]?.notifiySettings.end)
+ .replaceAll("[exclude]", releaseString);
+
+ // get the headers pending
+ const { data: header, error: headerError } = await tryCatch(
+ query(orders, "Ti get open headers")
+ );
+ if (headerError) {
+ createLog(
+ "error",
+ "ti",
+ "notify",
+ `Error getting headers: ${headerError}`
+ );
+ return {
+ success: false,
+ message: "Error getting headers",
+ data: headerError,
+ };
+ }
+
+ if (header.length === 0) {
+ createLog(
+ "info",
+ "ti",
+ "notify",
+ "There are no pending orders to be sent over to ti."
+ );
+ return {
+ success: true,
+ message: "There are no pending orders to be sent over to ti.",
+ };
+ }
+
+ createLog(
+ "info",
+ "tiIntergration",
+ "notify",
+ `There are a total of ${header.length} to send over`
+ );
+
+ /**
+ * Update the query to get only the first header
+ */
+
+ // update query to have the correct plant token
+ let orderToSend = getOrderToSend
+
+ .replaceAll("test1", plantToken[0].value)
+ .replaceAll("[releaseToProcess]", `'${header[0].releaseNumber}'`)
+ .replaceAll("[from]", notiSet[0].notifiySettings.start)
+ .replaceAll("[to]", notiSet[0].notifiySettings.end);
+
+ // get the headers pending
+ const { data: orderData, error: ordersError } = await tryCatch(
+ query(orderToSend, "Ti get open headers")
+ );
+ if (ordersError)
+ return {
+ success: false,
+ message: "Error getting getting orders",
+ data: ordersError,
+ };
+
+ // update the special instructions section
+ const otherSettings = plantI[0]?.otherSettings as {
+ specialInstructions: string;
+ active: boolean;
+ }[];
+
+ const specialInfo = otherSettings[0].specialInstructions.replaceAll(
+ "[header]",
+ orderData[0].Header
+ );
+
+ // add the full amount of pallets sending over
+ let fullPalToSend = orderData.reduce(
+ (acc: any, o: any) => acc + o.Pallets,
+ 0
+ );
+
+ //console.log("payload", payload);
+ payload = payload
+ .replaceAll(
+ `[WebImportHeader]`,
+ await headerUpdate(orderData, plantToken)
+ )
+ .replaceAll(`[items]`, await loadItems(orderData))
+ .replaceAll(`[customerAccountNum]`, customerAccountNum)
+ .replaceAll("[fullTotalPal]", fullPalToSend)
+ // add in release info
+ .replaceAll(`[shipNumber]`, orderData[0].releaseNumber)
+ .replaceAll(`[loadNumber]`, orderData[0].releaseNumber);
+
+ // add in the multi release numbers
+ let multiRelease = ``;
+ if (orderData.length > 0) {
+ for (let i = 0; i < orderData.length; i++) {
+ const newRelease = `
+ ${orderData[i].releaseNumber}`;
+ multiRelease += newRelease;
+ }
+
+ payload = payload.replaceAll("[multieReleaseNumber]", multiRelease);
+ } else {
+ payload = payload.replaceAll("[multieReleaseNumber]", "");
+ }
+
+ // add the correct date stuff
+ payload = payload
+ .replaceAll(
+ "[loadingDate]",
+ await dateCorrection(orderData[0].LoadingDate)
+ )
+
+ .replaceAll(
+ "[deliveryDate]",
+ await dateCorrection(orderData[0].DeliveryDate)
+ );
+
+ // shipping ours corrections
+ const formattedDate = orderData[0].LoadingDate.toLocaleDateString("en-US", {
+ month: "2-digit",
+ day: "2-digit",
+ year: "numeric",
+ });
+ const shippingHours = JSON.parse(plantI[0]?.shippingHours!);
+
+ payload = payload
+ .replaceAll(
+ "[shippingHoursEarly]",
+ `${formattedDate} ${shippingHours[0].early}`
+ )
+ .replaceAll(
+ "[shippingHoursLate]",
+ `${formattedDate} ${shippingHours[0].late}`
+ );
+
+ // special instructions
+ if (otherSettings[0].specialInstructions.length != 0) {
+ payload = payload.replaceAll("[specialInstructions]", specialInfo);
+ } else {
+ payload = payload.replaceAll("[specialInstructions]", "");
+ }
+
+ // shipper info
+ payload = payload
+ .replaceAll("[plantName]", `Alpla ${plantI[0]?.sName!}`)
+ .replaceAll("[plantStreetAddress]", plantI[0]?.streetAddress!)
+ .replaceAll("[plantCity]", plantI[0]?.cityState!.split(",")[0])
+ .replaceAll("[plantState]", plantI[0]?.cityState!.split(",")[1])
+ .replaceAll("[plantZipCode]", plantI[0]?.zipcode!)
+ .replaceAll("[contactNum]", plantI[0]?.contactPhone!)
+ .replaceAll("[contactEmail]", plantI[0]?.contactEmail!)
+
+ // customer info
+ .replaceAll("[customerName]", orderData[0].addressAlias)
+ .replaceAll("[customerStreetAddress]", orderData[0].streetAddress)
+ .replaceAll("[customerCity]", orderData[0].city.split(",")[0])
+ .replaceAll("[customerState]", orderData[0].city.split(",")[1])
+ .replaceAll("[customerZip]", orderData[0].zipCode)
+ .replaceAll("[customerPO]", orderData[0].Header)
+ .replaceAll(
+ "[glCoding]",
+ `52410-${
+ orderData[0].artileType.toLowerCase() === "preform" ||
+ orderData[0].artileType.toLowerCase() === "metalCage"
+ ? 31
+ : plantI[0].greatPlainsPlantCode
+ }`
+ ) // {"52410 - " + (artileType.toLowerCase() === "preform" || artileType.toLowerCase() === "metalCage" ? 31: plantInfo[0].greatPlainsPlantCode)}
+ .replaceAll(
+ "[pfc]",
+ `${
+ orderData[0].artileType.toLowerCase() === "preform" ||
+ orderData[0].artileType.toLowerCase() === "metalCage"
+ ? 40
+ : orderData[0].costCenter
+ }`
+ )
+ .replaceAll("[priceSheet]", await scacCheck(orderData));
+ //send over to be processed
+
+ //console.log("payload", payload);
+
+ const { data: tiPost, error: tiError } = await tryCatch(postToTi(payload));
+
+ if (tiError) {
+ return {
+ success: false,
+ message: "Error posting to TI",
+ error: tiError,
+ };
+ }
+
+ /**
+ * Update the db so we dont try to pull the next one
+ */
+
+    // new Set([...]) of object literals never dedupes (reference equality),
+    // so check for the release number explicitly before appending.
+    const prevReleases = notiSet[0].notifiySettings.releases;
+    const alreadySent = prevReleases.some(
+        (r: any) => r.releaseNumber === header[0].releaseNumber
+    );
+    const uniqueOrders = alreadySent
+        ? prevReleases
+        : [...prevReleases, { releaseNumber: header[0].releaseNumber, timeStamp: new Date() }];
+
+ const { data, error } = await tryCatch(
+ db
+ .update(notifications)
+ .set({
+ lastRan: sql`NOW()`,
+ notifiySettings: {
+ ...notiSet[0].notifiySettings,
+ releases: uniqueOrders,
+ },
+ })
+ .where(eq(notifications.name, "tiIntergration"))
+ );
+
+ createLog("info", "ti", "notify", "done with this order");
+
+ return { message: "done with this order" };
+};
diff --git a/server/services/notifications/controller/notifications/tiFullFlow/tiXmlPayload.ts b/server/services/notifications/controller/notifications/tiFullFlow/tiXmlPayload.ts
index 78f5139..5ed407b 100644
--- a/server/services/notifications/controller/notifications/tiFullFlow/tiXmlPayload.ts
+++ b/server/services/notifications/controller/notifications/tiFullFlow/tiXmlPayload.ts
@@ -1,9 +1,6 @@
export let xmlPayloadTI = `
ImportWeb
-[requestID]
-
-
[WebImportHeader]
@@ -35,19 +32,18 @@ export let xmlPayloadTI = `
-
- // get this from the price sheet
-
-
-
-
-
+[priceSheet]
+
+
+[specialInstructions]
+
+
-[loadingDate]
-[deliveryDate]
+[shippingHoursEarly]
+[shippingHoursLate]
@@ -119,24 +115,20 @@ export let xmlPayloadTI = `
-6
+
+[fullTotalPal]
+
-[loadingDate]
-[loadingDate]
+[shippingHoursEarly]
+[shippingHoursLate]
[deliveryDate]
[deliveryDate]
-
-
-
-
-
-
-
+[priceSheet]
@@ -160,7 +152,7 @@ export let xmlPayloadTI = `
-[customer]
+[customerName]
[customerStreetAddress]
[customerCity]
@@ -168,6 +160,17 @@ export let xmlPayloadTI = `
[customerZip]
USA
+
+
+
+ Alpla
+
+
+ [contactNum]
+ [contactEmail]
+
+
+
diff --git a/server/services/notifications/controller/notifications/tiIntergration.ts b/server/services/notifications/controller/notifications/tiIntergration.ts
index efa7ae9..59ea112 100644
--- a/server/services/notifications/controller/notifications/tiIntergration.ts
+++ b/server/services/notifications/controller/notifications/tiIntergration.ts
@@ -1,413 +1,10 @@
-import { xmlPayloadTI } from "./tiFullFlow/tiXmlPayload.js";
-import axios from "axios";
-import querystring from "querystring";
-import { getOrderToSend } from "../../../sqlServer/querys/notifications/ti/getOrderToSend.js";
-import { getHeaders } from "../../../sqlServer/querys/notifications/ti/getHeaders.js";
-import { tryCatch } from "../../../../globalUtils/tryCatch.js";
-import { db } from "../../../../../database/dbclient.js";
-import { settings } from "../../../../../database/schema/settings.js";
-import { serverData } from "../../../../../database/schema/serverData.js";
-import { eq, sql } from "drizzle-orm";
-import { notifications } from "../../../../../database/schema/notifications.js";
-import { query } from "../../../sqlServer/prodSqlServer.js";
-import { createLog } from "../../../logger/logger.js";
-import { freightClass } from "../../../../globalUtils/freightClass.js";
import { delay } from "../../../../globalUtils/delay.js";
-
-const dateCorrection = (newDate: any) => {
- return new Date(newDate)
- .toLocaleString("en-US", {
- timeZone: "UTC",
- year: "numeric",
- month: "2-digit",
- day: "2-digit",
- hour: "2-digit",
- minute: "2-digit",
- second: "2-digit",
- hourCycle: "h23", // Ensures 24-hour format
- })
- .replace(",", "");
-};
-
-const tiImport = async () => {
- //await initializePool();
-
- // get the plant token
- const { data: plantData, error: plantError } = await tryCatch(
- db.select().from(settings)
- );
- //await initializePool();
- if (plantError) return;
- const plantToken = plantData?.filter((n) => n.name === "plantToken");
-
- const { data: plantInfo, error: plantEr } = await tryCatch(
- db
- .select()
- .from(serverData)
- .where(eq(serverData.plantToken, plantToken[0].value))
- );
-
- // parsing posting window
- const plantI = plantInfo!;
- //const postTime = JSON.parse(plantI[0]?.tiPostTime!);
-
- // order notifications
- const { data: notificationSet, error: notificationSettingsErr } =
- await tryCatch(
- db
- .select()
- .from(notifications)
- .where(eq(notifications.name, "tiIntergration"))
- );
- if (notificationSettingsErr) return;
-
- const notiSet: any = notificationSet;
- //creds
- const userid = "ALPLAWSTEST";
- const password = "oe39U1LuLX9ZdY0XKobG";
-
- // const requestID = `ALPLAPBTEST1`; // production will be alpla01-dateTime - this will be the time it was sent over.
- const requestUser = "ALPLAWSTEST"; // if alplaprod_rs -- confirm we can use a user name vs the AlplapIMPORT // needs to stay the same as provied
-
- const customerAccountNum = plantI[0].customerTiAcc as string; // ti
-
- // it we dont get anything here we want to make sure we add it in
-
- // get current releaes not in the already sent oders
- let orders = getHeaders;
- orders = orders
- .replaceAll("test1", plantToken[0].value)
- .replaceAll("[from]", notiSet?.notifiySettings.start)
- .replaceAll("[to]", notiSet?.notifiySettings.end)
- .replaceAll(
- "[exclude]",
- notiSet.notifiySettings.processed
- .map((num: any) => `'${num}'`)
- .join(", ")
- );
-
- //console.log(orders);
- let headerPending = [];
- try {
- headerPending = await query(orders, "Ti get open headers");
- } catch (error) {
- console.log(error);
- }
-
- if (headerPending.length === 0) {
- createLog(
- "info",
- "notification",
- "notify",
- "There are no pending orders to be sent over to ti."
- );
- return {
- success: true,
- code: 1,
- message: "There are no pending orders to be sent over to ti.",
- };
- }
-
- createLog(
- "info",
- "notification",
- "notify",
- `There are a total of ${headerPending.length} to send over`
- );
- // update query to have the correct plant token
- let orderToSend = getOrderToSend;
- orderToSend = orderToSend
- .replaceAll("test1", plantToken[0].value)
- .replaceAll("[releaseToProcess]", `'${headerPending[0].releaseNumber}'`)
- .replaceAll("[from]", notiSet.notifiySettings.start)
- .replaceAll("[to]", notiSet.notifiySettings.end);
-
- // console.log(orderToSend);
- let records = [];
- try {
- records = await query(orderToSend, "Ti send order");
- } catch (error) {
- console.log(error);
- }
- //console.log(headerPending.length);
-
- // update the header
- let webHeader = `
- [requestID]
-
-
-
- [requestID].XML
- SOTransportLoader
- [requestUser]
-
- `;
-
- webHeader = webHeader.replaceAll(
- "[requestID]",
- `${records[0].releaseNumber}-${plantToken[0].value}`
- );
- webHeader = webHeader.replaceAll("[requestUser]", requestUser);
-
- // update the special instructions section
- const otherSettings = plantI[0]?.otherSettings as {
- specialInstructions: string;
- active: boolean;
- }[];
-
- const specialInfo = otherSettings[0].specialInstructions.replaceAll(
- "[header]",
- records[0].Header
- );
- // this part will link into the
- let itemGroups = "";
-
- for (let i = 0; i < records.length; i++) {
- let newItem = `
-
-
-
-
- ${(
- records[i].pkgLengh / 25.4
- ).toFixed(2)}
- ${(
- records[i].pkgWidth / 25.4
- ).toFixed(2)}
- ${Math.round(
- records[i].pkgHeight / 25.4
- ).toFixed(2)}
-
- ${`av ${records[i].article} ${records[i].articleAlias}`}
-
- ${freightClass(
- records[i].pkgWeight,
- records[i].pkgLengh,
- records[i].pkgWidth,
- records[i].pkgHeight
- )}
-
-
-
- false
-
-
- ${
- records[i].pkgWeight * records[i].Pallets
- }
-
-
- ${
- records[i].Pallets
- }
-
-
- `;
-
- itemGroups += newItem;
- }
-
- // add the full amount of pallets sending over
- let fullPalToSend = records.reduce(
- (acc: any, o: any) => acc + o.Pallets,
- 0
- );
-
- // rebuild the xml to be properly
- let payload = xmlPayloadTI;
- payload = payload
- .replaceAll(`[WebImportHeader]`, webHeader)
- .replaceAll(`[items]`, itemGroups)
- .replaceAll(`[customerAccountNum]`, customerAccountNum)
- .replaceAll("[fullTotalPal]", fullPalToSend);
-
- // update the main release
- //[loadNumber],[shipNumber]
- payload = payload.replaceAll(`[shipNumber]`, records[0].releaseNumber);
- payload = payload.replaceAll(`[loadNumber]`, records[0].releaseNumber);
-
- // do the multie release if needed
- // [multieReleaseNumber]
-
- let multiRelease = ``;
- if (records.length > 0) {
- for (let i = 0; i < records.length; i++) {
- const newRelease = `
- ${records[i].releaseNumber}`;
- multiRelease += newRelease;
- }
-
- payload = payload.replaceAll("[multieReleaseNumber]", multiRelease);
- } else {
- payload = payload.replaceAll("[multieReleaseNumber]", "");
- }
-
- //update the delivery section
- payload = payload.replaceAll(
- "[loadingDate]",
- dateCorrection(records[0].LoadingDate)
- );
-
- payload = payload.replaceAll(
- "[deliveryDate]",
- dateCorrection(records[0].DeliveryDate)
- );
-
- // shipping hours
- //[shippingHoursEarly]
- //[shippingHoursLate]
-
- // update teh shipping hours
-
- const now = new Date();
- const formattedDate = records[0].LoadingDate.toLocaleDateString("en-US", {
- month: "2-digit",
- day: "2-digit",
- year: "numeric",
- });
-
- const shippingHours = JSON.parse(plantI[0]?.shippingHours!);
- //console.log(shippingHours);
-
- payload = payload
- .replaceAll(
- "[shippingHoursEarly]",
- `${formattedDate} ${shippingHours[0].early}`
- )
- .replaceAll(
- "[shippingHoursLate]",
- `${formattedDate} ${shippingHours[0].late}`
- );
-
- payload = payload
- .replaceAll("[plantName]", `Alpla ${plantI[0]?.sName!}`)
- .replaceAll("[plantStreetAddress]", plantI[0]?.streetAddress!)
- .replaceAll("[plantCity]", plantI[0]?.cityState!.split(",")[0])
- .replaceAll("[plantState]", plantI[0]?.cityState!.split(",")[1])
- .replaceAll("[plantZipCode]", plantI[0]?.zipcode!)
- .replaceAll("[contactNum]", plantI[0]?.contactPhone!)
- .replaceAll("[contactEmail]", plantI[0]?.contactEmail!)
-
- // customer info
- .replaceAll("[customerName]", records[0].addressAlias)
- .replaceAll("[customerStreetAddress]", records[0].streetAddress)
- .replaceAll("[customerCity]", records[0].city.split(",")[0])
- .replaceAll("[customerState]", records[0].city.split(",")[1])
- .replaceAll("[customerZip]", records[0].zipCode)
- .replaceAll("[customerPO]", records[0].Header)
- .replaceAll(
- "[glCoding]",
- `52410-${
- records[0].artileType.toLowerCase() === "preform" ||
- records[0].artileType.toLowerCase() === "metalCage"
- ? 31
- : plantI[0].greatPlainsPlantCode
- }`
- ) // {"52410 - " + (artileType.toLowerCase() === "preform" || artileType.toLowerCase() === "metalCage" ? 31: plantInfo[0].greatPlainsPlantCode)}
- .replaceAll(
- "[pfc]",
- `${
- records[0].artileType.toLowerCase() === "preform" ||
- records[0].artileType.toLowerCase() === "metalCage"
- ? 40
- : records[0].costCenter
- }`
- );
-
- // special instructions
- if (otherSettings[0].specialInstructions.length != 0) {
- payload = payload.replaceAll("[specialInstructions]", specialInfo);
- }
-
- // update the carrier info if any is needed.
-
- // check the address has a real carrier on it and change to true and put the sacs code in
- const hasCarrier = true;
-
- console.log(
- `Checking if ${records[0].addressAlias} has scac: ${
- records[0].remark.split(",")[0] ? "there was one" : "no scac"
- }`
- );
-
- const priceSheet = `
-
-
-
- ${
- records[0].remark.split(",")[0]
- ? `${records[0].remark
- .split(",")[0]
- .split(":")[1]
- .toUpperCase()}`
- : ``
- }
-
-
-
- `;
-
- payload = payload.replaceAll("[priceSheet]", priceSheet);
- // console.log(payload);
- //await closePool();
-
- //put the xml into a form
- const formBody = querystring.stringify({
- userid,
- password,
- request: payload,
- });
- axios
- .post(
- "https://t-insightws.mercurygate.net/MercuryGate/common/remoteService.jsp",
- formBody,
- {
- headers: {
- "Content-Type": "application/x-www-form-urlencoded",
- },
- }
- )
- .then((response) => {
- //console.log(response.data)
- console.log("Data was sent over to TI");
- })
- .catch((error) => console.error(error));
-
- // console.log(payload);
-
- // the order is done so we want to update the processed.
-
- // add the new processed order to this
- let notiSettingArray = notiSet.notifiySettings;
-
- if (
- !notiSettingArray[0].processed.includes(headerPending[0].releaseNumber)
- ) {
- notiSettingArray[0].processed.push(headerPending[0].releaseNumber);
- }
-
- const { data, error } = await tryCatch(
- db
- .update(notifications)
- .set({
- lastRan: sql`NOW()`,
- notifiySettings: {
- ...notiSettingArray,
- prodID: 1,
- },
- })
- .where(eq(notifications.name, "tiIntergration"))
- );
- createLog("info", "ti", "notify", "done with this order");
- return { success: true, code: 0, message: "done with this order" };
-};
+import { createLog } from "../../../logger/logger.js";
+import { tiImport } from "./tiFullFlow/tiImport.js";
// add a running check so we cant flag it twice
export let tiExportRunning = false;
+
export const runTiImport = async () => {
let finished = false;
let test: any;
@@ -420,14 +17,20 @@ export const runTiImport = async () => {
"info",
"ti",
"notify",
- `Still more to process? ${test.code === 1 ? "No" : "Yes"}`
+ `Still more to process? ${test.success ? "No" : "Yes"}`
);
- if (test.code === 1) {
+ if (test.success) {
finished = true;
}
+
+ if (!test.success) {
+ //errors are handled in the tiImport function
+ tiExportRunning = false;
+ }
await delay(1000 * 5);
} while (!finished);
tiExportRunning = false;
+ return { success: true, message: "Finished processing all data." };
};
-export default tiImport;
+export default runTiImport;
diff --git a/server/services/notifications/notifyService.ts b/server/services/notifications/notifyService.ts
index e4225c6..7fd1add 100644
--- a/server/services/notifications/notifyService.ts
+++ b/server/services/notifications/notifyService.ts
@@ -9,10 +9,12 @@ import { createLog } from "../logger/logger.js";
import { note, notificationCreate } from "./utils/masterNotifications.js";
import { startNotificationMonitor } from "./utils/processNotifications.js";
import notifyStats from "./routes/getActiveNotifications.js";
+import tiTrigger from "./routes/manualTiggerTi.js";
+import blocking from "./routes/qualityBlocking.js";
const app = new OpenAPIHono();
-const routes = [sendemail, notifyStats] as const;
+const routes = [sendemail, notifyStats, tiTrigger, blocking] as const;
const appRoutes = routes.forEach((route) => {
app.route("/notify", route);
@@ -41,21 +43,9 @@ if (notesError) {
);
}
-if (note.length != notes?.length) {
+setTimeout(() => {
notificationCreate();
- createLog("info", "notify", "notify", `New notifcations being added.`);
- setTimeout(() => {
- startNotificationMonitor();
- }, 5 * 1000);
-} else {
- createLog(
- "info",
- "notify",
- "notify",
- `There are know new notifcations. no need to run the update. reminder all changes happen per server.`
- );
- setTimeout(() => {
- startNotificationMonitor();
- }, 5 * 1000);
-}
+ startNotificationMonitor();
+}, 5 * 1000);
+
export default app;
diff --git a/server/services/notifications/routes/manualTiggerTi.ts b/server/services/notifications/routes/manualTiggerTi.ts
index 1197918..a81d2f5 100644
--- a/server/services/notifications/routes/manualTiggerTi.ts
+++ b/server/services/notifications/routes/manualTiggerTi.ts
@@ -1,22 +1,26 @@
-// import {Router} from "express";
-// import {tiExportRunning, runTiImport} from "../../notification/notification/tiFullFlow/tiImports.js";
+// route to manually trigger the TI integration run
+import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
+import { responses } from "../../../globalUtils/routeDefs/responses.js";
-// const router = Router();
+import runTiImport from "../controller/notifications/tiIntergration.js";
-// router.get("/tiTrigger", async (req, res): Promise => {
-// if (tiExportRunning) {
-// res.status(200).json({
-// success: false,
-// message: "There is already a current sesion of the Export running please try again later.",
-// });
-// }
+const app = new OpenAPIHono({ strict: false });
-// // trigger the import
-// runTiImport();
-
-// res.status(200).json({
-// success: true,
-// message: "The Ti Export has been manually started and will continue to run in the background.",
-// });
-// });
-// export default router;
+app.openapi(
+ createRoute({
+ tags: ["notify"],
+    summary: "Manually trigger TI integrations.",
+ method: "get",
+ path: "/tiTrigger",
+ //middleware: authMiddleware,
+ responses: responses(),
+ }),
+ async (c) => {
+ const tiImport = await runTiImport();
+ return c.json({
+ success: tiImport?.success,
+ message: tiImport?.message,
+ });
+ }
+);
+export default app;
diff --git a/server/services/notifications/routes/qualityBlocking.ts b/server/services/notifications/routes/qualityBlocking.ts
new file mode 100644
index 0000000..97ba258
--- /dev/null
+++ b/server/services/notifications/routes/qualityBlocking.ts
@@ -0,0 +1,50 @@
+// route to manually trigger the quality blocking check
+import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
+import { responses } from "../../../globalUtils/routeDefs/responses.js";
+
+import qualityBlockingMonitor from "../controller/notifications/qualityBlocking.js";
+import { tryCatch } from "../../../globalUtils/tryCatch.js";
+
+import { notifications } from "../../../../database/schema/notifications.js";
+import { db } from "../../../../database/dbclient.js";
+import { eq } from "drizzle-orm";
+
+const app = new OpenAPIHono({ strict: false });
+
+app.openapi(
+ createRoute({
+ tags: ["notify"],
+    summary: "Manually trigger the quality blocking check.",
+ method: "get",
+ path: "/blockingTrigger",
+ //middleware: authMiddleware,
+ responses: responses(),
+ }),
+ async (c) => {
+ /**
+     * Fetch the quality-blocking notification settings from the database.
+ */
+
+ const { data, error } = await tryCatch(
+ db
+ .select()
+ .from(notifications)
+ .where(eq(notifications.name, "qualityBlocking"))
+ );
+
+ if (error) {
+ return c.json({
+ success: false,
+ message: "Error Getting Notification Settings.",
+ data: error,
+ });
+ }
+ const blocking = await qualityBlockingMonitor(data[0]);
+
+ return c.json({
+ success: blocking?.success,
+ message: blocking?.message,
+ });
+ }
+);
+export default app;
diff --git a/server/services/notifications/utils/masterNotifications.ts b/server/services/notifications/utils/masterNotifications.ts
index bc23a99..66bc906 100644
--- a/server/services/notifications/utils/masterNotifications.ts
+++ b/server/services/notifications/utils/masterNotifications.ts
@@ -31,7 +31,10 @@ export const note: any = [
timeType: "min",
emails: "",
active: false,
- notifiySettings: { prodID: 1, sentBlockingOrders: [1] },
+ notifiySettings: {
+ prodID: 1,
+ sentBlockingOrders: [{ timeStamp: "0", blockingOrder: 1 }],
+ },
},
{
name: "productionCheck",
@@ -72,8 +75,8 @@ export const note: any = [
notifiySettings: {
prodID: 1,
start: 36,
- end: 720,
- releases: [1, 2, 3],
+ end: 36,
+ releases: [{ timeStamp: "0", releaseNumber: 1 }],
},
},
{
@@ -86,7 +89,7 @@ export const note: any = [
notifiySettings: {
prodID: 1,
start: 36,
- end: 720,
+ end: 36,
releases: [1, 2, 3],
},
},
@@ -98,7 +101,14 @@ export const notificationCreate = async () => {
const notify = await db
.insert(notifications)
.values(note[i])
- .onConflictDoNothing();
+ .onConflictDoUpdate({
+ target: notifications.name,
+ set: {
+ name: note[i].name,
+ description: note[i].description,
+ //notifiySettings: note[i].notifiySettings,
+ },
+ });
} catch (error) {
createLog(
"error",
@@ -110,4 +120,10 @@ export const notificationCreate = async () => {
);
}
}
+ createLog(
+ "info",
+ "lst",
+ "nofity",
+ "notifications were just added/updated due to server startup"
+ );
};
diff --git a/server/services/notifications/utils/processNotifications.ts b/server/services/notifications/utils/processNotifications.ts
index 7a04429..40c8091 100644
--- a/server/services/notifications/utils/processNotifications.ts
+++ b/server/services/notifications/utils/processNotifications.ts
@@ -43,23 +43,43 @@ export const startNotificationMonitor = async () => {
if (
!note.active ||
- note.emails === "" ||
+ // note.emails === "" ||
runningNotifications[note.name]
) {
- //console.log(`Skipping ${note.name} hes already scheduled`);
continue;
}
+ if (!runningNotifications[note.name] && note.active) {
+ createLog(
+ "info",
+ "notify",
+ "notify",
+ `${note.name} Is active and not already running.`
+ );
+ }
+
let time = `*/30 * * * *`; // default to be every 30 min
if (note.timeType === "min") {
- console.log(`Creating the min mark here`);
- time = `*/${note.checkInterval} * * * *`;
+ //console.log(`Creating the min mark here`);
+ const totalMinutes = note.checkInterval;
+ if (note.checkInterval > 60) {
+        const hours = Math.floor(totalMinutes / 60); // whole hours in the interval
+        const minutes = totalMinutes % 60; // remaining minutes past the hour
+ time = `*/${minutes} */${hours} * * *`;
+ } else {
+ time = `*/${note.checkInterval} * * * *`;
+ }
}
-
if (note.timeType === "hour") {
- console.log(`Creating the hour mark here`);
- time = `* */${note.checkInterval} * * *`;
+ const totalHours = note.checkInterval;
+ if (note.checkInterval > 60) {
+        const days = Math.floor(totalHours / 24); // whole days in the interval
+        const hours = totalHours % 24; // remaining hours past the day
+ time = `* */${hours} */${days} * *`;
+ } else {
+ time = `* */${note.checkInterval} * * *`;
+ }
}
createJob(note.name, time, async () => {
diff --git a/server/services/sqlServer/querys/dataMart/customerInventoryQuerys.ts b/server/services/sqlServer/querys/dataMart/customerInventoryQuerys.ts
new file mode 100644
index 0000000..525adb5
--- /dev/null
+++ b/server/services/sqlServer/querys/dataMart/customerInventoryQuerys.ts
@@ -0,0 +1,45 @@
+export const customerInvNoHold = `
+select x.idartikelVarianten as av
+,ArtikelVariantenAlias as Alias
+--x.Lfdnr as RunningNumber,
+--,round(sum(EinlagerungsMengeVPKSum),0) as Total_Pallets
+--,sum(EinlagerungsMengeSum) as Total_PalletQTY
+,round(sum(VerfuegbareMengeVPKSum),0) as Avalible_Pallets
+,sum(VerfuegbareMengeSum) as Avaliable_PalletQTY
+,sum(case when c.Description LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as COA_Pallets
+,sum(case when c.Description LIKE '%COA%' then GesperrteMengeSum else 0 end) as COA_QTY
+--,sum(case when c.Description NOT LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as Held_Pallets
+--,sum(case when c.Description NOT LIKE '%COA%' then GesperrteMengeSum else 0 end) as Held_QTY
+,IdProdPlanung as Lot
+--,IdAdressen
+--,x.AdressBez
+--,*
+from [AlplaPROD_test1].dbo.[V_LagerPositionenBarcodes] (nolock) x
+
+left join
+[AlplaPROD_test1].dbo.T_EtikettenGedruckt (nolock) on
+x.Lfdnr = T_EtikettenGedruckt.Lfdnr AND T_EtikettenGedruckt.Lfdnr > 1
+
+left join
+
+(SELECT *
+ FROM [AlplaPROD_test1].[dbo].[T_BlockingDefects] (nolock) where Active = 1) as c
+ on x.IdMainDefect = c.IdBlockingDefect
+/*
+The data below will be controlled by the user in excell by default everything will be passed over
+ IdAdressen = 3
+*/
+where IdArtikelTyp = 1
+and x.IdWarenlager not in (6, 1)
+--and IdAdressen
+
+
+group by x.IdArtikelVarianten
+,ArtikelVariantenAlias
+,IdProdPlanung
+--,c.Description
+,IdAdressen
+,x.AdressBez
+--, x.Lfdnr
+order by x.IdArtikelVarianten
+`;
diff --git a/server/services/sqlServer/querys/dataMart/openOrders.ts b/server/services/sqlServer/querys/dataMart/openOrders.ts
new file mode 100644
index 0000000..faf08dd
--- /dev/null
+++ b/server/services/sqlServer/querys/dataMart/openOrders.ts
@@ -0,0 +1,27 @@
+export const openOrders = `
+Select LEFT(ArtikelVariantenAlias, charindex(' ', ArtikelVariantenAlias) - 1) customerItemNumber,
+x.IdArtikelVarianten AS article,
+ArtikelVariantenAlias AS articleDescription,
+IdAuftragsAbruf as releaseNumber,
+AuftragsNummer AS header,
+AuftragsNummer as customerLineItemNo,
+AbrufNummer AS customerReleaseNumber,
+AbrufMengeVPK AS pallets,
+AbrufMenge AS qty,
+y.TradeUnits AS cartons,
+IdAdresse AS customerID,
+LieferAdressBez as DeliveryAddressDescription,
+AbrufLadeDatum AS loadingDate,
+AbrufLiefertermin AS deliveryDate
+--,OrderStatus = 'loading'
+--,*
+FROM alplaprod_test1.dbo.V_TrackerAuftragsAbrufe (nolock) x
+
+left join
+[test1_AlplaPROD2.0_Read].[order].[Release] (nolock) y on
+x.IdAuftragsAbruf = y.ReleaseNumber
+
+--WHERE AbrufStatus = 1 AND AbrufLiefertermin < getdate() + 5 AND GelieferteMenge = 0
+WHERE AbrufStatus = 1 AND AbrufLiefertermin between getDate() + -[sDay] and getdate() + [eDay] AND GelieferteMenge = 0
+ORDER BY AbrufLiefertermin
+`;
diff --git a/server/services/sqlServer/querys/notifications/blocking.ts b/server/services/sqlServer/querys/notifications/blocking.ts
new file mode 100644
index 0000000..bc71846
--- /dev/null
+++ b/server/services/sqlServer/querys/notifications/blocking.ts
@@ -0,0 +1,45 @@
+export const blockQuery = `
+SELECT
+'Alert! new blocking order: #' + cast(HumanReadableId as varchar) + ' - ' + ArticleVariantDescription as subject,
+cast([HumanReadableId] as varchar) as blockingNumber,
+[ArticleVariantDescription] as article,
+cast([CustomerHumanReadableId] as varchar) + ' - ' + [CustomerDescription] as customer,
+convert(varchar(10), [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate], 101) + ' - ' + convert(varchar(5), [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate], 108) as blockingDate,
+cast(ArticleVariantHumanReadableId as varchar) + ' - ' + ArticleVariantDescription as av,
+case when [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark = '' or [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark is NULL then 'Please reach out to quality for the reason this was placed on hold as a remark was not entered during the blocking processs' else [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark end as remark,
+cast(FORMAT(TotalAmountOfPieces, '###,###') as varchar) + ' / ' + cast(LoadingUnit as varchar) as peicesAndLoadingUnits,
+[test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].ProductionLotHumanReadableId as lotNumber,
+cast(IdGlobalBlockingDefectsGroup as varchar) + ' - ' + BD.Description as mainDefectGroup,
+cast(IdGlobalBlockingDefect as varchar) + ' - ' + MD.Description as mainDefect,
+sent=0,
+lot.MachineLocation as line,
+HumanReadableId
+FROM [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder] (nolock)
+
+/*** Join 1.0 table to get correct id info to link ***/
+join
+[AlplaPROD_test1].[dbo].[T_BlockingOrders] (nolock) AS BO
+on [HumanReadableId] = BO.[IdBlockingOrder]
+
+
+/*** Get the main defect info ***/
+Inner join
+[AlplaPROD_test1].[dbo].[T_BlockingDefectsGroups] (nolock) as BD
+ON BO.IdMainDefectGroup = BD.IdBlockingDefectsGroup
+
+INNER join
+[AlplaPROD_test1].[dbo].[T_BlockingDefects] as MD
+ON BO.IdMainDefect = MD.IdBlockingDefect
+/*** get lot info ***/
+
+left join
+(SELECT [MachineLocation]
+,[MachineDescription]
+,[ProductionLotHumanReadableId]
+FROM [test1_AlplaPROD2.0_Reporting].[reporting_productionControlling].[ProducedLot]) as lot
+on [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].ProductionLotHumanReadableId = lot.ProductionLotHumanReadableId
+
+where [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate] between getdate() - 1 and getdate() + 1
+and [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].BlockingTrigger = 1
+and HumanReadableId NOT IN ([sentBlockingOrders])
+`;