diff --git a/frontend/src/routes/_old/old/-components/logistics/helperCommands/commands/Relocate.tsx b/frontend/src/routes/_old/old/-components/logistics/helperCommands/commands/Relocate.tsx index ef73ca1..6e6a7a6 100644 --- a/frontend/src/routes/_old/old/-components/logistics/helperCommands/commands/Relocate.tsx +++ b/frontend/src/routes/_old/old/-components/logistics/helperCommands/commands/Relocate.tsx @@ -59,7 +59,7 @@ export default function Relocate() { validators={{ // We can choose between form-wide and field-specific validators onChange: ({ value }) => - value.length > 2 + value.length > 0 ? undefined : "Please enter a valid running number", }} @@ -88,7 +88,7 @@ export default function Relocate() { validators={{ // We can choose between form-wide and field-specific validators onChange: ({ value }) => - value.length > 2 ? undefined : "Please enter a valid lane ID", + value.length > 0 ? undefined : "Please enter a valid lane ID", }} children={(field) => { return ( diff --git a/lstV2/server/services/dataMart/controller/psiGetPlanningData.ts b/lstV2/server/services/dataMart/controller/psiGetPlanningData.ts index a677482..624143a 100644 --- a/lstV2/server/services/dataMart/controller/psiGetPlanningData.ts +++ b/lstV2/server/services/dataMart/controller/psiGetPlanningData.ts @@ -51,6 +51,7 @@ export const psiGetPlanningData = async ( }; } + // TODO: if we are not running planning we no pass the old structure if we are running new planning use the below improved version that makes sure we dont have negative numebrs. 
articles = data.data; return { diff --git a/lstV2/server/services/dataMart/route/getCurrentQuerys.ts b/lstV2/server/services/dataMart/route/getCurrentQuerys.ts index bf98bb4..1f75a7d 100644 --- a/lstV2/server/services/dataMart/route/getCurrentQuerys.ts +++ b/lstV2/server/services/dataMart/route/getCurrentQuerys.ts @@ -145,7 +145,7 @@ app.openapi( return c.json({ success: true, message: "All Current Active Querys.", - sheetVersion: 2.8, + sheetVersion: 2.8, // TODO: when this gets switched change this data: current, }); }, diff --git a/lstV2/server/services/logistics/controller/commands/relocated.ts b/lstV2/server/services/logistics/controller/commands/relocated.ts index 6d28b5c..d9b5c3a 100644 --- a/lstV2/server/services/logistics/controller/commands/relocated.ts +++ b/lstV2/server/services/logistics/controller/commands/relocated.ts @@ -5,6 +5,7 @@ import { createSSCC } from "../../../../globalUtils/createSSCC.js"; import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { createLog } from "../../../logger/logger.js"; +import { query } from "../../../sqlServer/prodSqlServer.js"; type Data = { runningNr: number; @@ -16,7 +17,32 @@ export const relatePallet = async (data: Data) => { // console.log(data); // create the url to post - const url = await prodEndpointCreation("/public/v1.0/Warehousing/Relocate"); + + // do we have warehousing turned on? 
+ const { data: feature, error: featureError } = (await tryCatch( + query( + `SELECT [Id] + ,[Feature] + ,[Enabled] + ,[ActivationDate] + FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`, + "feature switch check", + ), + )) as any; + + let prodUrl = "/public/v1.0/Warehousing/Relocate"; + if (featureError) { + prodUrl = "/public/v1.0/Warehousing/Relocate"; + } + + if (feature?.data.length > 0) { + prodUrl = "/public/v1.1/Warehousing/Unit/Relocate"; + } + // 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel"," + // 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel" + + let url = await prodEndpointCreation(prodUrl); + const SSCC = await createSSCC(runningNr); const consumeSomething = { ScannerId: 999, @@ -40,7 +66,7 @@ export const relatePallet = async (data: Data) => { }; } - if (results.data.Result !== 0) { + if (results.data.Result !== 0 || results.data.data.length <= 0) { return { success: false, message: results.data.Message, diff --git a/lstV2/server/services/logistics/controller/siloAdjustments/postAdjustment.ts b/lstV2/server/services/logistics/controller/siloAdjustments/postAdjustment.ts index da40d4e..e4daae0 100644 --- a/lstV2/server/services/logistics/controller/siloAdjustments/postAdjustment.ts +++ b/lstV2/server/services/logistics/controller/siloAdjustments/postAdjustment.ts @@ -1,6 +1,7 @@ import axios from "axios"; import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js"; +import { query } from "../../../sqlServer/prodSqlServer.js"; export const postAdjustment = async (data: any) => { if (data.warehouseId === undefined) { @@ -35,9 +36,30 @@ export const postAdjustment = async (data: any) => { quantity: data.quantity, }; - let url = await prodEndpointCreation( - "/public/v1.0/Warehousing/AdjustSiloStockLevel", - ); + // do we have warehousing turned on? 
+ const { data: feature, error: featureError } = (await tryCatch( + query( + `SELECT [Id] + ,[Feature] + ,[Enabled] + ,[ActivationDate] + FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`, + "feature switch check", + ), + )) as any; + + let prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel"; + if (featureError) { + prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel"; + } + + if (feature?.data.length > 0) { + prodUrl = "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"; + } + // 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel"," + // 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel" + + let url = await prodEndpointCreation(prodUrl); const { data: silo, error } = await tryCatch( axios.post(url, siloAdjustment, { diff --git a/lstV2/server/services/notifications/controller/notifications/platToPlantEdi.ts b/lstV2/server/services/notifications/controller/notifications/platToPlantEdi.ts new file mode 100644 index 0000000..c34fda1 --- /dev/null +++ b/lstV2/server/services/notifications/controller/notifications/platToPlantEdi.ts @@ -0,0 +1,160 @@ +import { eq, sql } from "drizzle-orm"; +import { db } from "../../../../../database/dbclient.js"; +import { notifications } from "../../../../../database/schema/notifications.js"; +import { tryCatch } from "../../../../globalUtils/tryCatch.js"; +import { createLog } from "../../../logger/logger.js"; +import { query } from "../../../sqlServer/prodSqlServer.js"; +import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js"; +import { sendEmail } from "../sendMail.js"; + +export default async function platToPlantEdi(notifyData: any) { + createLog("info", "blocking", "notify", `monitoring ${notifyData.name}`); + + const { data: noti, error: notiError } = await tryCatch( + db + .select() + .from(notifications) + .where(eq(notifications.name, notifyData.name)), + ); + + if (notiError) { + createLog( + "error", + "edi", + "notify", + "Error in getting the 
notification data", + ); + } + + // get the default emails they can be blank if as we will only add these to the end of the email from the full flow + let emails = notifyData.email ?? ""; + + const checkBol = sqlQuerySelector("checkBol.query"); + + if (!checkBol.success) { + createLog("error", "edi", "notify", "Error in getting the bol query data"); + } + + const pLinkedB = sqlQuerySelector("palletsLinkedToBol.query"); + + if (!pLinkedB.success) { + createLog("error", "edi", "notify", "Error in getting the bol query data"); + } + + let ignoreBols: string[] = notifyData?.notifiySettings?.processedBol ?? []; + + const joinBols = ignoreBols.join(","); + + const { data: b, error: bError } = (await tryCatch( + query( + checkBol?.query + ?.replace("[timeCheck]", notifyData.checkInterval ?? "30") + .replace("[ignoreBols]", joinBols ?? 500) ?? "", + "Check bol", + ), + )) as any; + + if (bError) { + return { + success: false, + message: "Error getting newly created bols", + data: bError, + }; + } + + const planedByBol = new Map(); + + for (const row of b.data) { + if (!planedByBol.has(row.bol)) { + planedByBol.set(row.bol, []); + } + + planedByBol.get(row.bol)!.push(String(row.idladeplanung)); + } + + if (b.data.length > 0) { + // loop each bol in the system and get the bols only + for (const [bolNumber, idList] of planedByBol.entries()) { + //for (const bol of b.data) { + // run the process to get the the pallet numbers + + const joinedIdLadeplanung = idList.join(","); + + //console.log("BOL:", bolNumber); + //console.log("IDLadeplanung string:", joinedIdLadeplanung); + //console.log("IgnoreBols: ", joinBols); + + // now get the pallets that are witing the ladeplanning + const { data: pallets, error: pError } = await tryCatch( + query( + pLinkedB?.query?.replace( + "[palLinkedToBol]", + joinedIdLadeplanung ?? "0", + ) ?? "", + "Get Pallets linked in the bol", + ), + ); + + //console.log(pallets); + + // console.log("Address: ", b.data[0].addressId ?? 
"0"); + + // if theres no email then just stop. + if (b.data[0].addressId === "") return; + + ignoreBols.push(bolNumber); + if (ignoreBols.length > 15) { + ignoreBols.splice(0, ignoreBols.length - 15); + } + + // get the email address. + const checkBol = sqlQuerySelector("addressInfo.query"); + + const { data: address, error: aError } = (await tryCatch( + query( + checkBol?.query?.replace( + "[customerAddress]", + b.data[0].addressId ?? "0", + ) ?? "", + "Get Pallets linked in the bol", + ), + )) as any; + + // setup the email to be sent :D + const emailSetup = { + email: address.data[0].email, + subject: `New EDI transfer Created for BOL: ${bolNumber}`, + template: "plantToPlantEdi", + context: { + items: pallets?.data ?? [], + bol: bolNumber, + //secondarySetting: notifyData.notifiySettings, + }, + }; + + // send the email + await sendEmail(emailSetup); + + // add the bols to be ignored + await db + .update(notifications) + .set({ + lastRan: sql`NOW()`, + notifiySettings: { processedBol: ignoreBols }, + }) + .where(eq(notifications.name, notifyData.name)); + } + + return { + success: true, + message: "All bols have been processed", + data: [ignoreBols], + }; + } + + return { + success: true, + message: "No new bols have been created", + data: [], + }; +} diff --git a/lstV2/server/services/notifications/controller/sendMail.ts b/lstV2/server/services/notifications/controller/sendMail.ts index 6dde1b6..c445cde 100644 --- a/lstV2/server/services/notifications/controller/sendMail.ts +++ b/lstV2/server/services/notifications/controller/sendMail.ts @@ -45,57 +45,57 @@ export const sendEmail = async (data: any): Promise => { }; } // get the plantToken - const server = settingData.filter((n) => n.name === "server"); + //const server = settingData.filter((n) => n.name === "server"); - if ( - server[0].value === "localhostx" && - process.env.EMAIL_USER && - process.env.EMAIL_PASSWORD - ) { - transporter = nodemailer.createTransport({ - service: "gmail", - host: 
"smtp.gmail.com", - port: 465, - auth: { - user: process.env.EMAIL_USER, - pass: process.env.EMAIL_PASSWORD, - }, - //debug: true, - }); + // if ( + // server[0].value === "localhostx" && + // process.env.EMAIL_USER && + // process.env.EMAIL_PASSWORD + // ) { + // transporter = nodemailer.createTransport({ + // service: "gmail", + // host: "smtp.gmail.com", + // port: 465, + // auth: { + // user: process.env.EMAIL_USER, + // pass: process.env.EMAIL_PASSWORD, + // }, + // //debug: true, + // }); - // update the from email - fromEmail = process.env.EMAIL_USER; - } else { - // convert to the correct plant token. - //const plantToken = settingData.filter((s) => s.name === "plantToken"); + // // update the from email + // fromEmail = process.env.EMAIL_USER; + //} else { + // convert to the correct plant token. + //const plantToken = settingData.filter((s) => s.name === "plantToken"); - // let host = `${plantToken[0].value}-smtp.alpla.net`; + // let host = `${plantToken[0].value}-smtp.alpla.net`; - // const testServers = ["test1", "test2", "test3"]; + // const testServers = ["test1", "test2", "test3"]; - // if (testServers.includes(plantToken[0].value)) { - // host = "USMCD1-smtp.alpla.net"; - // } + // if (testServers.includes(plantToken[0].value)) { + // host = "USMCD1-smtp.alpla.net"; + // } - // if (plantToken[0].value === "usiow2") { - // host = "USIOW1-smtp.alpla.net"; - // } + // if (plantToken[0].value === "usiow2") { + // host = "USIOW1-smtp.alpla.net"; + // } - transporter = nodemailer.createTransport({ - host: "smtp.azurecomm.net", - port: 587, - //rejectUnauthorized: false, - tls: { - minVersion: "TLSv1.2", - }, - auth: { - user: "donotreply@mail.alpla.com", - pass: process.env.SMTP_PASSWORD, - }, - debug: true, - }); - fromEmail = `DoNotReply@mail.alpla.com`; - } + transporter = nodemailer.createTransport({ + host: "smtp.azurecomm.net", + port: 587, + //rejectUnauthorized: false, + tls: { + minVersion: "TLSv1.2", + }, + auth: { + user: 
"donotreply@mail.alpla.com", + pass: process.env.SMTP_PASSWORD, + }, + debug: true, + }); + fromEmail = `DoNotReply@mail.alpla.com`; + //} // creating the handlbar options const viewPath = path.resolve( diff --git a/lstV2/server/services/notifications/utils/masterNotifications.ts b/lstV2/server/services/notifications/utils/masterNotifications.ts index 6e23cac..c304934 100644 --- a/lstV2/server/services/notifications/utils/masterNotifications.ts +++ b/lstV2/server/services/notifications/utils/masterNotifications.ts @@ -164,6 +164,16 @@ export const note: any = [ errorCount: 10, // change this to something else or leave blank to use the av type }, }, + { + name: "platToPlantEdi", + description: + "This is the plant to plant edi that will send an edi to the email once it ships, the emails will be for the receiving plants", + checkInterval: 15, + timeType: "min", + emails: "", + active: false, + notifiySettings: { processedBol: [500] }, + }, ]; export const notificationCreate = async () => { diff --git a/lstV2/server/services/notifications/utils/views/plantToPlantEdi.hbs b/lstV2/server/services/notifications/utils/views/plantToPlantEdi.hbs new file mode 100644 index 0000000..4823f1d --- /dev/null +++ b/lstV2/server/services/notifications/utils/views/plantToPlantEdi.hbs @@ -0,0 +1,46 @@ + + + + + + {{!-- --}} + {{> styles}} + + +

All,

+

BOL: {{bol}} was created with the below pallets.

+

Please head to stock and import the pallets via the normal incoming goods process.

+

When encountering a discrepancy in pallets/cages received, please correct this after the pallets have been imported.

+

Because these are plant-to-plant transfers, the only way to correct a discrepancy is to bring the pallets in and then undo the incoming goods process.

+

+ + + + + + + + + {{!-- --}} + + + + {{#each items}} + + + + + + + {{!-- --}} + + {{/each}} + +
Running NumberAVDescriptionLot numberQuantityDowntime finish
{{runningNr}}{{article}}{{alias}}{{lotNumber}}{{qty}}{{dtEnd}}
+
+

Thank you,

+

LST Team

+
+ + + \ No newline at end of file diff --git a/lstV2/server/services/sqlServer/prodSqlServer.ts b/lstV2/server/services/sqlServer/prodSqlServer.ts index 47ca69d..6b4d782 100644 --- a/lstV2/server/services/sqlServer/prodSqlServer.ts +++ b/lstV2/server/services/sqlServer/prodSqlServer.ts @@ -167,23 +167,23 @@ export async function query(queryToRun: string, name: string) { const dbServer = serverSettings.filter( (n: any) => n.name === "dbServer", ) as any; - const serverUp = await checkHostnamePort( - `${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`, - ); + // const serverUp = await checkHostnamePort( + // `${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`, + // ); - if (!serverUp) { - createLog( - "error", - "lst", - "server", - `The sql ${dbServer[0].value} is not reachable`, - ); - return { - success: false, - message: `The sql ${dbServer[0].value} is not reachable`, - data: [], - }; - } + // if (!serverUp) { + // createLog( + // "error", + // "lst", + // "server", + // `Failed to run query due to ${dbServer[0].value} not being reachable.`, + // ); + // return { + // success: false, + // message: `Failed to run query due to ${dbServer[0].value} not being reachable.`, + // data: [], + // }; + // } if (!connected) { createLog( diff --git a/lstV2/server/services/sqlServer/querys/newQueries/addressInfo.query.sql b/lstV2/server/services/sqlServer/querys/newQueries/addressInfo.query.sql new file mode 100644 index 0000000..e272f58 --- /dev/null +++ b/lstV2/server/services/sqlServer/querys/newQueries/addressInfo.query.sql @@ -0,0 +1,14 @@ +use [test3_AlplaPROD2.0_Read] + +select +humanreadableId as addressId +,ContactEmail as email +,ContactName +,ContactPhoneNumber +,Name +,Street +,City +,ZipCode +--,* +from [masterData].[Address] (nolock) +where humanreadableid = [customerAddress] \ No newline at end of file diff --git a/lstV2/server/services/sqlServer/querys/newQueries/checkBol.query.sql 
b/lstV2/server/services/sqlServer/querys/newQueries/checkBol.query.sql new file mode 100644 index 0000000..e298d47 --- /dev/null +++ b/lstV2/server/services/sqlServer/querys/newQueries/checkBol.query.sql @@ -0,0 +1,43 @@ +use AlplaPROD_test1 + + +/** +check if we have any new alpla bols that were created +*/ + +SELECT +x.idladeplanung +,e.idjournal +,e.journalNummer as bol +,e.idjournalstatus +,e.ladeDatum as loadDate +,e.bemerkung +,e.ereporting_idwerk +,e.journalDatum +,a.idadressen as addressId +,a.bezeichnung as addressDescription +,a.strasse as streetAddress +,a.ort as cityState +,a.plz as zipcode +,idauftrag as releaseNumber +--,* + FROM [dbo].[T_EAIJournal] as e with (nolock) + + + -- pull in the address so we only pull in florence data +left join + [dbo].[T_EAIJournalAdresse] as a with (nolock) on + a.[IdJournalAdresse] = [IdJournalKundenAdresse] + +-- get the table to link the pallets to the bol +left join +[dbo].[T_EAIJournalPosition] as x with (nolock) on +x.idjournal = e.idjournal + +where idjournalStatus = 62 + --and idadressen = 270 + --and a.bezeichnung like '%Alpla%' -- we only want to monitor for addresses that are linked to alpla. 
+ and JournalDatum > DATEADD(MINUTE, -[timeCheck], GETDATE()) + and e.idjournal not in ([ignoreBols]) + and idauftrag > 1 -- this will ignore all incoming goodsv as we are really only looking for outbound deliveries +order by JournalDatum desc \ No newline at end of file diff --git a/lstV2/server/services/sqlServer/querys/newQueries/palletsLinkedToBol.query.sql b/lstV2/server/services/sqlServer/querys/newQueries/palletsLinkedToBol.query.sql new file mode 100644 index 0000000..3a23db3 --- /dev/null +++ b/lstV2/server/services/sqlServer/querys/newQueries/palletsLinkedToBol.query.sql @@ -0,0 +1,36 @@ +use AlplaPROD_test1 + + select * from (SELECT + p.[IdLadePlanung] + ,p.[Beleg] as lotNumber + ,p.[LfdNrJeArtikelKunde] as runningNr + ,p.[Barcode] + ,p.[ProduktionsDatum] as productionDate + ,p.[Add_User] as scanDate + ,p.[Add_Date] + ,p.[Upd_User] + ,p.[Upd_Date] + ,p.[IdJournalWarenPosition] + ,p.[LieferMenge] as qty + -- ,av.IdArtikelvarianten as article + -- ,av.Bezeichnung as alias + ,av.articlehumanreadableid as article + ,av.ArticleDescription as alias + --,[SSCC_ReserveZiffer] + ,ROW_NUMBER() OVER (PARTITION BY p.[LfdNrJeArtikelKunde] ORDER BY p.upd_date DESC) AS RowNum + --,* + + FROM [dbo].[T_EAIJournalLieferPosition] as p (nolock) + +-- left join +-- dbo.T_ProdPlanung as l on +-- l.IdProdPlanung = p.Beleg + +left join +[test3_AlplaPROD2.0_Read].labelling.InternalLabel as av on + av.RunningNumber = p.[LfdNrJeArtikelKunde] +) as a + +where idladeplanung in ([palLinkedToBol]) and RowNum = 1 + +order by runningNr diff --git a/package.json b/package.json index 3ebc9fc..2b04741 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,8 @@ "dev:front": "cd frontend && npm run dev", "dev:db:migrate": "npx drizzle-kit push", "dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts", - "dev": "concurrently -n \"server,frontend,docs,oldServer\" -c \"#007755,#2f6da3,#DB4FE0, #1F73D1\" \"npm run dev:app\" \"npm run dev:front\" \"npm run 
dev:docs\" \"npm run dev:old\"", + "dev": "concurrently -n \"server,oldServer\" -c \"#007755, #1F73D1\" \"npm run dev:app\" \"npm run dev:old\"", + "dev:all": "concurrently -n \"server,frontend,docs,oldServer\" -c \"#007755,#2f6da3,#DB4FE0, #1F73D1\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\" \"npm run dev:old\"", "dev:old": "cd lstV2 && npm run dev", "copy:docs": "node scripts/lstDocCopy.mjs", "build:app": "rimraf dist && npx tsc && node scripts/lstAppMoves.mjs",