feat(lstv2 move): moved lstv2 into this app to keep them combined and easier to maintain
This commit is contained in:
@@ -0,0 +1,120 @@
|
||||
import axios from "axios";
|
||||
import { commandLog } from "../../../../../database/schema/commandLog.js";
|
||||
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { lstAuth } from "../../../../index.js";
|
||||
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import net from "net";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { labelInfo } from "../../../sqlServer/querys/warehouse/labelInfo.js";
|
||||
import { settings } from "../../../../../database/schema/settings.js";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { serverData } from "../../../../../database/schema/serverData.js";
|
||||
export const removeAsNonReusable = async (data: any) => {
|
||||
// const removalUrl = await prodEndpointCreation(
|
||||
// "/public/v1.0/Warehousing/RemoveAsNonReusableMaterial"
|
||||
// );
|
||||
|
||||
// const sscc = await createSSCC(data.runningNr);
|
||||
|
||||
// const { data: remove, error } = await tryCatch(
|
||||
// axios.post(
|
||||
// removalUrl,
|
||||
// { scannerId: "500", sscc: sscc.slice(2) },
|
||||
// {
|
||||
// headers: { Authorization: `Basic ${lstAuth}` },
|
||||
// }
|
||||
// )
|
||||
// );
|
||||
|
||||
// use a scanner tcp connection to trigger this process
|
||||
const STX = "\x02";
|
||||
const ETX = "\x03";
|
||||
const scanner = new net.Socket();
|
||||
let stage = 0;
|
||||
// get the label info
|
||||
const { data: label, error: labelError } = (await tryCatch(
|
||||
query(labelInfo.replaceAll("[runningNr]", data.runningNr), "Label Info")
|
||||
)) as any;
|
||||
|
||||
if (label.data[0].stockStatus === "notOnStock") {
|
||||
return {
|
||||
success: false,
|
||||
message: `The label: ${data.runningNr} is not currently in stock`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
// get the server ip based on the token.
|
||||
const setting = await db.select().from(settings);
|
||||
|
||||
const plantInfo = await db.select().from(serverData);
|
||||
const plantToken = setting.filter((n: any) => n.name === "plantToken");
|
||||
const scannerID = setting.filter((n: any) => n.name === "scannerID");
|
||||
const scannerPort = setting.filter((n: any) => n.name === "scannerPort");
|
||||
const plantData = plantInfo.filter(
|
||||
(p: any) => p.plantToken === plantToken[0].value
|
||||
);
|
||||
|
||||
scanner.connect(
|
||||
parseInt(scannerPort[0].value),
|
||||
plantData[0].idAddress!,
|
||||
async () => {
|
||||
// need to get the ip from the server data and scanner port
|
||||
//console.log(`connected to scanner`);
|
||||
scanner.write(`${STX}${scannerID[0].value}@AlplaPRODcmd23${ETX}`);
|
||||
}
|
||||
);
|
||||
scanner.on("data", (data) => {
|
||||
const response = data.toString();
|
||||
//console.log("Received:", response.trimStart());
|
||||
if (stage === 0) {
|
||||
stage = 1;
|
||||
scanner.write(
|
||||
`${STX}${scannerID[0].value}@${label.data[0].Barcode}${ETX}`
|
||||
);
|
||||
} else if (stage === 1) {
|
||||
scanner.end();
|
||||
}
|
||||
});
|
||||
scanner.on("close", () => {
|
||||
//console.log("Connection closed");
|
||||
scanner.destroy();
|
||||
});
|
||||
scanner.on("error", (err) => {
|
||||
//console.error("Scanner error:", err);
|
||||
scanner.destroy();
|
||||
return {
|
||||
success: false,
|
||||
message: `The label: ${data.runningNr} encountering an error while being removed, please try again`,
|
||||
data: [],
|
||||
};
|
||||
});
|
||||
|
||||
// if (error) {
|
||||
// //console.log(error);
|
||||
// return {
|
||||
// success: false,
|
||||
// message: `There was an error removing ${data.runningNr}`,
|
||||
// data: [],
|
||||
// };
|
||||
// }
|
||||
|
||||
let reason = data.reason || "";
|
||||
delete data.reason;
|
||||
|
||||
const { data: commandL, error: ce } = await tryCatch(
|
||||
db.insert(commandLog).values({
|
||||
commandUsed: "removeAsNonReusable",
|
||||
bodySent: data,
|
||||
reasonUsed: reason,
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `The label: ${data.runningNr}, was removed`,
|
||||
data: [],
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,34 @@
|
||||
import * as XLSX from "xlsx";
|
||||
|
||||
export const standardForCastTemplate = async () => {
|
||||
/**
|
||||
* Creates the standard Template for bulk orders in
|
||||
*/
|
||||
|
||||
const headers = [
|
||||
["CustomerArticleNumber", "Quantity", "RequirementDate", "CustomerID"],
|
||||
];
|
||||
|
||||
// create a new workbook
|
||||
const wb = XLSX.utils.book_new();
|
||||
const ws = XLSX.utils.aoa_to_sheet(headers);
|
||||
//const ws2 = XLSX.utils.aoa_to_sheet(headers2);
|
||||
|
||||
const columnWidths = headers[0].map((header) => ({
|
||||
width: header.length + 2,
|
||||
}));
|
||||
|
||||
ws["!cols"] = columnWidths;
|
||||
|
||||
// append the worksheet to the workbook
|
||||
XLSX.utils.book_append_sheet(wb, ws, `Sheet1`);
|
||||
//XLSX.utils.book_append_sheet(wb, ws2, `Sheet2`);
|
||||
|
||||
// Write the excel file and trigger the download'
|
||||
XLSX.writeFile(wb, "BulkForecastTemplate");
|
||||
|
||||
// Write the workbook to a buffer and return it
|
||||
const excelBuffer = XLSX.write(wb, { bookType: "xlsx", type: "buffer" });
|
||||
|
||||
return excelBuffer;
|
||||
};
|
||||
@@ -0,0 +1,48 @@
|
||||
import { lorealForecast } from "./mappings/loralForecast.js";
|
||||
import { pNgForecast } from "./mappings/pNgForecast.js";
|
||||
import { standardForecast } from "./mappings/standardForcast.js";
|
||||
|
||||
export const forecastIn = async (data: any, user: any) => {
|
||||
/**
|
||||
* Bulk orders in, and custom file parsing.
|
||||
*/
|
||||
|
||||
let success = true;
|
||||
let message = "";
|
||||
let orderData: any = [];
|
||||
|
||||
// what type of order are we dealing with?
|
||||
if (data["fileType"] === "standard") {
|
||||
//run the standard forecast in
|
||||
const standard = await standardForecast(data["postForecast"], user);
|
||||
success = standard.success ?? false;
|
||||
message = standard.message ?? "Error posting standard forecast";
|
||||
orderData = standard.data;
|
||||
}
|
||||
|
||||
if (data["fileType"] === "energizer") {
|
||||
// orders in
|
||||
}
|
||||
|
||||
if (data["fileType"] === "loreal") {
|
||||
//run the standard forecast in
|
||||
const loreal = await lorealForecast(data["postForecast"], user);
|
||||
success = loreal.success ?? false;
|
||||
message = loreal.message ?? "Error posting standard forecast";
|
||||
orderData = loreal.data;
|
||||
}
|
||||
|
||||
if (data["fileType"] === "pg") {
|
||||
//run the standard forecast in
|
||||
const pg = await pNgForecast(data["postForecast"], user);
|
||||
success = pg.success ?? false;
|
||||
message = pg.message ?? "Error posting standard forecast";
|
||||
orderData = pg.data;
|
||||
}
|
||||
|
||||
return {
|
||||
success,
|
||||
message,
|
||||
data: orderData,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,286 @@
|
||||
import { db } from "../../../../../../../database/dbclient.js";
|
||||
import { settings } from "../../../../../../../database/schema/settings.js";
|
||||
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
|
||||
import XLSX from "xlsx";
|
||||
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
|
||||
import { postForecast } from "../postForecast.js";
|
||||
import { query } from "../../../../../sqlServer/prodSqlServer.js";
|
||||
import { activeArticle } from "../../../../../sqlServer/querys/dataMart/article.js";
|
||||
import { addDays } from "date-fns";
|
||||
import { sendEmail } from "../../../../../notifications/controller/sendMail.js";
|
||||
import { createLog } from "../../../../../logger/logger.js";
|
||||
|
||||
let customerID = 4;
|
||||
export const lorealForecast = async (data: any, user: any) => {
|
||||
/**
|
||||
* Post a standard forecast based on the standard template.
|
||||
*/
|
||||
|
||||
const { data: s, error: e } = await tryCatch(db.select().from(settings));
|
||||
|
||||
if (e) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting settings`,
|
||||
data: e,
|
||||
};
|
||||
}
|
||||
|
||||
const plantToken = s.filter((s) => s.name === "plantToken");
|
||||
|
||||
const arrayBuffer = await data.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
|
||||
const workbook = XLSX.read(buffer, { type: "buffer" });
|
||||
|
||||
const sheet: any = workbook.Sheets["Alpla HDPE"];
|
||||
const range = XLSX.utils.decode_range(sheet["!ref"]);
|
||||
|
||||
const psheet: any = workbook.Sheets["Alpla PET"];
|
||||
const prange = XLSX.utils.decode_range(psheet["!ref"]);
|
||||
|
||||
const headers = [];
|
||||
for (let C = range.s.c; C <= range.e.c; ++C) {
|
||||
const cellAddress = XLSX.utils.encode_cell({ r: 1, c: C }); // row 0 = Excel row 1
|
||||
const cell = sheet[cellAddress];
|
||||
headers.push(cell ? cell.v : undefined);
|
||||
}
|
||||
|
||||
const pheaders = [];
|
||||
for (let C = prange.s.c; C <= prange.e.c; ++C) {
|
||||
const cellAddress = XLSX.utils.encode_cell({ r: 1, c: C }); // row 0 = Excel row 1
|
||||
const cell = psheet[cellAddress];
|
||||
pheaders.push(cell ? cell.v : undefined);
|
||||
}
|
||||
|
||||
const ebmForeCastData: any = XLSX.utils.sheet_to_json(sheet, {
|
||||
defval: "",
|
||||
header: headers,
|
||||
range: 3,
|
||||
});
|
||||
|
||||
const petForeCastData: any = XLSX.utils.sheet_to_json(psheet, {
|
||||
defval: "",
|
||||
header: pheaders,
|
||||
range: 3,
|
||||
});
|
||||
|
||||
const ebmForecastData: any = [];
|
||||
const missingSku: any = [];
|
||||
|
||||
const { data: a, error: ae } = await tryCatch(
|
||||
query(activeArticle, "Loreal calling active av")
|
||||
);
|
||||
|
||||
if (ae) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Error getting active av",
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
const article: any = a?.data;
|
||||
|
||||
// process the ebm forcast
|
||||
for (let i = 0; i < ebmForeCastData.length; i++) {
|
||||
// bottle code
|
||||
const sku = ebmForeCastData[i]["HDPE Bottle Code"];
|
||||
|
||||
// ignore the blanks
|
||||
if (sku === "") continue;
|
||||
|
||||
// ignore zero qty
|
||||
// if (ebmForeCastData[i][`Day ${i}`]) continue;
|
||||
|
||||
for (let f = 0; f <= 90; f++) {
|
||||
const day = `Day ${f + 1}`;
|
||||
// if (ebmForeCastData[i][day] === 0) continue;
|
||||
|
||||
const forcast = {
|
||||
customerArticleNo: sku,
|
||||
requirementDate: addDays(new Date(Date.now()), f), //excelDateStuff(parseInt(date)),
|
||||
quantity: ebmForeCastData[i][day] ?? 0,
|
||||
};
|
||||
|
||||
if (forcast.quantity === 0) continue;
|
||||
|
||||
// checking to make sure there is a real av to add to.
|
||||
const activeAV = article.filter(
|
||||
(c: any) =>
|
||||
c?.CustomerArticleNumber ===
|
||||
forcast.customerArticleNo.toString()
|
||||
);
|
||||
|
||||
if (activeAV.length === 0) {
|
||||
if (typeof forcast.customerArticleNo === "number") {
|
||||
missingSku.push(forcast);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
ebmForecastData.push(forcast);
|
||||
}
|
||||
|
||||
//console.log(ebmForeCastData.length);
|
||||
}
|
||||
|
||||
// petForeCastData.forEach((item: any, index: any) => {
|
||||
// //console.log(`Processing item ${index + 1} of ${forecastData.length}`);
|
||||
|
||||
// // Extract the customer code
|
||||
// const customerCode = item["SOUTH PET BOTTLES"];
|
||||
|
||||
// // Process each date in the current object
|
||||
// for (const [date, qty] of Object.entries(item)) {
|
||||
// // Skip metadata fields
|
||||
// if (petMetadataFields.includes(date)) continue;
|
||||
|
||||
// if (qty === 0) continue;
|
||||
|
||||
// // Create your transformed record
|
||||
// const record = {
|
||||
// customerArticleNo: customerCode,
|
||||
// requirementDate: excelDateStuff(parseInt(date)),
|
||||
// quantity: qty,
|
||||
// };
|
||||
|
||||
// // Do something with this record
|
||||
// petForecastData.push(record);
|
||||
// }
|
||||
// });
|
||||
|
||||
// pet forecast
|
||||
for (let i = 0; i < petForeCastData.length; i++) {
|
||||
// bottle code
|
||||
const sku = petForeCastData[i]["South PET Bottle Code"];
|
||||
|
||||
// ignore the blanks
|
||||
if (sku === "") continue;
|
||||
|
||||
// ignore zero qty
|
||||
// if (ebmForeCastData[i][`Day ${i}`]) continue;
|
||||
|
||||
for (let f = 0; f <= 90; f++) {
|
||||
const day = `Day ${f + 1}`;
|
||||
// if (ebmForeCastData[i][day] === 0) continue;
|
||||
|
||||
const forcast = {
|
||||
customerArticleNo: sku,
|
||||
requirementDate: addDays(new Date(Date.now()), f), //excelDateStuff(parseInt(date)),
|
||||
quantity: petForeCastData[i][day] ?? 0,
|
||||
};
|
||||
|
||||
if (forcast.quantity === 0 || forcast.quantity === "") continue;
|
||||
|
||||
if (forcast.customerArticleNo < 99999) {
|
||||
//console.log(`Sku a normal av ${forcast.customerArticleNo}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// checking to make sure there is a real av to add to.
|
||||
const activeAV = article.filter(
|
||||
(c: any) =>
|
||||
c?.CustomerArticleNumber ===
|
||||
forcast.customerArticleNo.toString()
|
||||
);
|
||||
|
||||
if (activeAV.length === 0) {
|
||||
if (typeof forcast.customerArticleNo === "number") {
|
||||
missingSku.push(forcast);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
ebmForecastData.push(forcast);
|
||||
}
|
||||
}
|
||||
|
||||
//console.log(comForecast);
|
||||
|
||||
// email the for the missing ones
|
||||
const missedGrouped = Object.values(
|
||||
missingSku.reduce((acc: any, item: any) => {
|
||||
const key = item.customerArticleNo;
|
||||
|
||||
if (!acc[key]) {
|
||||
// first time we see this customer
|
||||
acc[key] = item;
|
||||
} else {
|
||||
// compare dates and keep the earliest
|
||||
if (
|
||||
new Date(item.requirementDate) <
|
||||
new Date(acc[key].requirementDate)
|
||||
) {
|
||||
acc[key] = item;
|
||||
}
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, {})
|
||||
);
|
||||
|
||||
const emailSetup = {
|
||||
email: "Blake.matthes@alpla.com; Stuart.Gladney@alpla.com; Harold.Mccalister@alpla.com; Jenn.Osbourn@alpla.com",
|
||||
subject:
|
||||
missedGrouped.length > 0
|
||||
? `Alert! There are ${missedGrouped.length}, missing skus.`
|
||||
: `Alert! There is a missing SKU.`,
|
||||
template: "missingLorealSkus",
|
||||
context: {
|
||||
items: missedGrouped,
|
||||
},
|
||||
};
|
||||
|
||||
const { data: sentEmail, error: sendEmailError } = await tryCatch(
|
||||
sendEmail(emailSetup)
|
||||
);
|
||||
if (sendEmailError) {
|
||||
createLog(
|
||||
"error",
|
||||
"blocking",
|
||||
"notify",
|
||||
"Failed to send email, will try again on next interval"
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message: "Failed to send email, will try again on next interval",
|
||||
};
|
||||
}
|
||||
|
||||
// if the customerarticle number is not matching just ignore it
|
||||
const predefinedObject = {
|
||||
receivingPlantId: plantToken[0].value,
|
||||
documentName: `ForecastFromLST-${new Date(Date.now()).toLocaleString(
|
||||
"en-US"
|
||||
)}`,
|
||||
sender: user.username || "lst-system",
|
||||
customerId: customerID,
|
||||
positions: [],
|
||||
};
|
||||
|
||||
let updatedPredefinedObject = {
|
||||
...predefinedObject,
|
||||
positions: [
|
||||
...predefinedObject.positions,
|
||||
...ebmForecastData,
|
||||
|
||||
// ...ebmForecastData.filter(
|
||||
// (q: any) =>
|
||||
// q.customerArticleNo != "" && q.customerArticleNo != "Total"
|
||||
// ),
|
||||
// ...petForecastData.filter(
|
||||
// (q: any) =>
|
||||
// q.customerArticleNo != "" && q.customerArticleNo != "Total"
|
||||
// ),
|
||||
],
|
||||
};
|
||||
// console.log(updatedPredefinedObject);
|
||||
const posting: any = await postForecast(updatedPredefinedObject, user);
|
||||
|
||||
return {
|
||||
success: posting.success,
|
||||
message: posting.message,
|
||||
data: posting.data === "" ? ebmForecastData : posting.data,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,164 @@
|
||||
import { db } from "../../../../../../../database/dbclient.js";
|
||||
import { settings } from "../../../../../../../database/schema/settings.js";
|
||||
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
|
||||
import XLSX from "xlsx";
|
||||
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
|
||||
import { postForecast } from "../postForecast.js";
|
||||
import { query } from "../../../../../sqlServer/prodSqlServer.js";
|
||||
import { activeArticle } from "../../../../../sqlServer/querys/dataMart/article.js";
|
||||
|
||||
export const pNgForecast = async (data: any, user: any) => {
|
||||
/**
|
||||
* Post a standard forecast based on the standard template.
|
||||
*/
|
||||
|
||||
const { data: s, error: e } = await tryCatch(db.select().from(settings));
|
||||
|
||||
if (e) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting settings`,
|
||||
data: e,
|
||||
};
|
||||
}
|
||||
|
||||
const pNg = s.filter((n: any) => n.name === "pNgAddress");
|
||||
|
||||
const { data: a, error: ae } = await tryCatch(
|
||||
query(activeArticle, "p&g active av")
|
||||
);
|
||||
|
||||
if (ae) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Error getting active av",
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
const article: any = a?.data;
|
||||
|
||||
const plantToken = s.filter((s) => s.name === "plantToken");
|
||||
|
||||
const arrayBuffer = await data.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
|
||||
const workbook = XLSX.read(buffer, { type: "buffer" });
|
||||
|
||||
const sheetName = workbook.SheetNames[0];
|
||||
//const sheet: any = workbook.Sheets[sheetName];
|
||||
const sheet: any = workbook.Sheets["SchedAgreementUIConfigSpreadshe"];
|
||||
const range = XLSX.utils.decode_range(sheet["!ref"]);
|
||||
|
||||
const headers = [];
|
||||
for (let C = range.s.c; C <= range.e.c; ++C) {
|
||||
const cellAddress = XLSX.utils.encode_cell({ r: 0, c: C }); // row 0 = Excel row 1
|
||||
const cell = sheet[cellAddress];
|
||||
headers.push(cell ? cell.v : undefined);
|
||||
}
|
||||
|
||||
//console.log(headers);
|
||||
const forecastData: any = XLSX.utils.sheet_to_json(sheet, {
|
||||
defval: "",
|
||||
header: headers,
|
||||
range: 1,
|
||||
});
|
||||
|
||||
const groupedByCustomer: any = forecastData.reduce(
|
||||
(acc: any, item: any) => {
|
||||
const id = item.CustomerID;
|
||||
if (!acc[id]) {
|
||||
acc[id] = [];
|
||||
}
|
||||
acc[id].push(item);
|
||||
return acc;
|
||||
},
|
||||
{}
|
||||
);
|
||||
|
||||
const foreCastData: any = [];
|
||||
|
||||
for (const [customerID, forecast] of Object.entries(groupedByCustomer)) {
|
||||
//console.log(`Running for Customer ID: ${customerID}`);
|
||||
const newForecast: any = forecast;
|
||||
|
||||
const predefinedObject = {
|
||||
receivingPlantId: plantToken[0].value,
|
||||
documentName: `ForecastFromLST-${new Date(
|
||||
Date.now()
|
||||
).toLocaleString("en-US")}`,
|
||||
sender: user.username || "lst-system",
|
||||
customerId: pNg[0].value,
|
||||
positions: [],
|
||||
};
|
||||
|
||||
// map everything out for each order
|
||||
const nForecast = newForecast.map((o: any) => {
|
||||
// const invoice = i.filter(
|
||||
// (i: any) => i.deliveryAddress === parseInt(customerID)
|
||||
// );
|
||||
// if (!invoice) {
|
||||
// return;
|
||||
// }
|
||||
|
||||
return {
|
||||
customerArticleNo: parseInt(o["Customer Item No."]),
|
||||
requirementDate: excelDateStuff(parseInt(o["Request Date"])),
|
||||
quantity: o["Remaining Qty to be Shipped"],
|
||||
};
|
||||
});
|
||||
|
||||
// check to make sure the av belongs in this plant.
|
||||
const onlyNumbers = nForecast.filter((n: any) => n.quantity > 0);
|
||||
const filteredForecast: any = [];
|
||||
|
||||
for (let i = 0; i < nForecast.length; i++) {
|
||||
//console.log(nForecast[i].customerArticleNo);
|
||||
const activeAV = article.filter(
|
||||
(c: any) =>
|
||||
c?.CustomerArticleNumber ===
|
||||
nForecast[i]?.customerArticleNo.toString() &&
|
||||
// validate it works via the default address
|
||||
c?.IdAdresse === parseInt(pNg[0].value)
|
||||
);
|
||||
|
||||
if (activeAV.length > 0) {
|
||||
filteredForecast.push(onlyNumbers[i]);
|
||||
}
|
||||
}
|
||||
|
||||
if (filteredForecast.length === 0) {
|
||||
console.log("Nothing to post");
|
||||
return {
|
||||
success: true,
|
||||
message: "No forecast to be posted",
|
||||
data: foreCastData,
|
||||
};
|
||||
}
|
||||
|
||||
// do that fun combining thing
|
||||
let updatedPredefinedObject = {
|
||||
...predefinedObject,
|
||||
positions: [...predefinedObject.positions, ...filteredForecast],
|
||||
};
|
||||
|
||||
//console.log(updatedPredefinedObject);
|
||||
|
||||
// post the orders to the server
|
||||
const posting: any = await postForecast(updatedPredefinedObject, user);
|
||||
|
||||
foreCastData.push({
|
||||
customer: customerID,
|
||||
//totalOrders: orders?.length(),
|
||||
success: posting.success,
|
||||
message: posting.message,
|
||||
data: posting.data,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Forecast Posted",
|
||||
data: foreCastData,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,114 @@
|
||||
import { db } from "../../../../../../../database/dbclient.js";
|
||||
import { settings } from "../../../../../../../database/schema/settings.js";
|
||||
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
|
||||
import XLSX from "xlsx";
|
||||
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
|
||||
import { postForecast } from "../postForecast.js";
|
||||
|
||||
export const standardForecast = async (data: any, user: any) => {
|
||||
/**
|
||||
* Post a standard forecast based on the standard template.
|
||||
*/
|
||||
|
||||
const { data: s, error: e } = await tryCatch(db.select().from(settings));
|
||||
|
||||
if (e) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting settings`,
|
||||
data: e,
|
||||
};
|
||||
}
|
||||
|
||||
const plantToken = s.filter((s) => s.name === "plantToken");
|
||||
|
||||
const arrayBuffer = await data.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
|
||||
const workbook = XLSX.read(buffer, { type: "buffer" });
|
||||
|
||||
const sheetName = workbook.SheetNames[0];
|
||||
const sheet = workbook.Sheets[sheetName];
|
||||
|
||||
const headers = [
|
||||
"CustomerArticleNumber",
|
||||
"Quantity",
|
||||
"RequirementDate",
|
||||
"CustomerID",
|
||||
];
|
||||
|
||||
const forecastData: any = XLSX.utils.sheet_to_json(sheet, {
|
||||
defval: "",
|
||||
header: headers,
|
||||
range: 1,
|
||||
});
|
||||
|
||||
const groupedByCustomer: any = forecastData.reduce(
|
||||
(acc: any, item: any) => {
|
||||
const id = item.CustomerID;
|
||||
if (!acc[id]) {
|
||||
acc[id] = [];
|
||||
}
|
||||
acc[id].push(item);
|
||||
return acc;
|
||||
},
|
||||
{}
|
||||
);
|
||||
|
||||
const foreCastData: any = [];
|
||||
|
||||
for (const [customerID, forecast] of Object.entries(groupedByCustomer)) {
|
||||
//console.log(`Running for Customer ID: ${customerID}`);
|
||||
const newForecast: any = forecast;
|
||||
|
||||
const predefinedObject = {
|
||||
receivingPlantId: plantToken[0].value,
|
||||
documentName: `ForecastFromLST-${new Date(
|
||||
Date.now()
|
||||
).toLocaleString("en-US")}`,
|
||||
sender: user.username || "lst-system",
|
||||
customerId: customerID,
|
||||
positions: [],
|
||||
};
|
||||
|
||||
// map everything out for each order
|
||||
const nForecast = newForecast.map((o: any) => {
|
||||
// const invoice = i.filter(
|
||||
// (i: any) => i.deliveryAddress === parseInt(customerID)
|
||||
// );
|
||||
// if (!invoice) {
|
||||
// return;
|
||||
// }
|
||||
return {
|
||||
customerArticleNo: o.CustomerArticleNumber,
|
||||
requirementDate: excelDateStuff(parseInt(o.RequirementDate)),
|
||||
quantity: o.Quantity,
|
||||
};
|
||||
});
|
||||
|
||||
// do that fun combining thing
|
||||
let updatedPredefinedObject = {
|
||||
...predefinedObject,
|
||||
positions: [...predefinedObject.positions, ...nForecast],
|
||||
};
|
||||
|
||||
//console.log(updatedPredefinedObject);
|
||||
|
||||
// post the orders to the server
|
||||
const posting: any = await postForecast(updatedPredefinedObject, user);
|
||||
|
||||
foreCastData.push({
|
||||
customer: customerID,
|
||||
//totalOrders: orders?.length(),
|
||||
success: posting.success,
|
||||
message: posting.message,
|
||||
data: posting.data,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Forecast Posted",
|
||||
data: foreCastData,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,76 @@
|
||||
import axios from "axios";
|
||||
import { prodEndpointCreation } from "../../../../../globalUtils/createUrl.js";
|
||||
import { createLog } from "../../../../logger/logger.js";
|
||||
|
||||
export const postForecast = async (data: any, user: any) => {
|
||||
let endpoint = await prodEndpointCreation(
|
||||
"/public/v1.0/DemandManagement/DELFOR"
|
||||
);
|
||||
|
||||
//console.log(endpoint);
|
||||
//console.log(req.body.orders[0]);
|
||||
try {
|
||||
const results = await axios({
|
||||
url: endpoint,
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-Key": process.env.TEC_API_KEY || "",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
// if a body is sent over it would be like below
|
||||
data: data,
|
||||
});
|
||||
|
||||
//console.log(results.data);
|
||||
//console.log(results.status);
|
||||
if (results.data.errors) {
|
||||
createLog(
|
||||
"error",
|
||||
"forecast",
|
||||
"logistics",
|
||||
`There was an error posting the Forecast: ${JSON.stringify(
|
||||
results.data.errors
|
||||
)}`
|
||||
);
|
||||
return {
|
||||
success: true,
|
||||
message: "Error processing forecast",
|
||||
data: results.data.errors[0].message,
|
||||
};
|
||||
}
|
||||
|
||||
if (results.status === 200) {
|
||||
createLog(
|
||||
"info",
|
||||
"forecast",
|
||||
"logistics",
|
||||
`Forcast was successfully posted: ${JSON.stringify(
|
||||
results.data
|
||||
)}`
|
||||
);
|
||||
return {
|
||||
success: true,
|
||||
message: "Success on posting forecast",
|
||||
data: results.data,
|
||||
};
|
||||
}
|
||||
} catch (error: any) {
|
||||
//console.log(`There is an error`, error);
|
||||
if (error) {
|
||||
//console.log(error.response.data);
|
||||
createLog(
|
||||
"error",
|
||||
"forecast",
|
||||
"logistics",
|
||||
`There was an error posting the Forecast: ${JSON.stringify(
|
||||
error.response.data
|
||||
)}`
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message: "There was an error posting the Forecast",
|
||||
data: error.response.data,
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,44 @@
|
||||
import * as XLSX from "xlsx";
|
||||
|
||||
export const standardTemplate = async () => {
|
||||
/**
|
||||
* Creates the standard Template for bulk orders in
|
||||
*/
|
||||
|
||||
const headers = [
|
||||
[
|
||||
"CustomerArticleNumber",
|
||||
"CustomerOrderNumber",
|
||||
"CustomerLineNumber",
|
||||
"CustomerRealeaseNumber",
|
||||
"Quantity",
|
||||
"DeliveryDate",
|
||||
"CustomerID",
|
||||
"Remark",
|
||||
// "InvoiceID",
|
||||
],
|
||||
];
|
||||
|
||||
// create a new workbook
|
||||
const wb = XLSX.utils.book_new();
|
||||
const ws = XLSX.utils.aoa_to_sheet(headers);
|
||||
//const ws2 = XLSX.utils.aoa_to_sheet(headers2);
|
||||
|
||||
const columnWidths = headers[0].map((header) => ({
|
||||
width: header.length + 2,
|
||||
}));
|
||||
|
||||
ws["!cols"] = columnWidths;
|
||||
|
||||
// append the worksheet to the workbook
|
||||
XLSX.utils.book_append_sheet(wb, ws, `Sheet1`);
|
||||
//XLSX.utils.book_append_sheet(wb, ws2, `Sheet2`);
|
||||
|
||||
// Write the excel file and trigger the download'
|
||||
XLSX.writeFile(wb, "BulkOrdersTemplate");
|
||||
|
||||
// Write the workbook to a buffer and return it
|
||||
const excelBuffer = XLSX.write(wb, { bookType: "xlsx", type: "buffer" });
|
||||
|
||||
return excelBuffer;
|
||||
};
|
||||
@@ -0,0 +1,184 @@
|
||||
import XLSX from "xlsx";
|
||||
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
|
||||
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
|
||||
import { db } from "../../../../../../../database/dbclient.js";
|
||||
import { settings } from "../../../../../../../database/schema/settings.js";
|
||||
import { query } from "../../../../../sqlServer/prodSqlServer.js";
|
||||
import { bulkOrderArticleInfo } from "../../../../../sqlServer/querys/dm/bulkOrderArticleInfo.js";
|
||||
import { addDays, addHours, isAfter, parse } from "date-fns";
|
||||
import { orderState } from "../../../../../sqlServer/querys/dm/orderState.js";
|
||||
import { postOrders } from "../postOrders.js";
|
||||
|
||||
// customeris/articles stuff will be in basis once we move to iowa
|
||||
let customerID = 8;
|
||||
let invoiceID = 9;
|
||||
let articles = "118,120";
|
||||
export const abbottOrders = async (data: any, user: any) => {
|
||||
/**
|
||||
* Standard orders meaning that we get the standard file exported and fill it out and uplaod to lst.
|
||||
*/
|
||||
|
||||
const { data: s, error: e } = await tryCatch(db.select().from(settings));
|
||||
|
||||
if (e) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting settings`,
|
||||
data: e,
|
||||
};
|
||||
}
|
||||
|
||||
// articleInfo
|
||||
const { data: article, error: ae } = await tryCatch(
|
||||
query(
|
||||
bulkOrderArticleInfo.replace("[articles]", articles),
|
||||
"Get Article data for bulk orders"
|
||||
)
|
||||
);
|
||||
const a: any = article?.data;
|
||||
if (ae) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting article data`,
|
||||
data: ae,
|
||||
};
|
||||
}
|
||||
|
||||
// order state
|
||||
const { data: o, error: oe } = await tryCatch(
|
||||
query(orderState, "Gets the next 500 orders that have not been started")
|
||||
);
|
||||
|
||||
const openOrders: any = o?.data;
|
||||
|
||||
if (oe) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting article data`,
|
||||
data: oe,
|
||||
};
|
||||
}
|
||||
|
||||
const plantToken = s.filter((s) => s.name === "plantToken");
|
||||
|
||||
const arrayBuffer = await data.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
|
||||
const workbook = XLSX.read(buffer, { type: "buffer" });
|
||||
|
||||
const sheetName = workbook.SheetNames[0];
|
||||
const sheet = workbook.Sheets[sheetName];
|
||||
|
||||
// Define custom headers
|
||||
const customHeaders = ["date", "time", "newton8oz", "newton10oz"];
|
||||
const orderData = XLSX.utils.sheet_to_json(sheet, {
|
||||
range: 5, // Start at row 5 (index 4)
|
||||
header: customHeaders,
|
||||
defval: "", // Default value for empty cells
|
||||
});
|
||||
|
||||
// the base of the import
|
||||
const predefinedObject = {
|
||||
receivingPlantId: plantToken[0].value,
|
||||
documentName: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
|
||||
"en-US"
|
||||
)}`,
|
||||
sender: user.username || "lst-system",
|
||||
externalRefNo: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
|
||||
"en-US"
|
||||
)}`,
|
||||
orders: [],
|
||||
};
|
||||
const oOrders: any = openOrders;
|
||||
|
||||
let correctedOrders: any = orderData
|
||||
.filter(
|
||||
(o: any) =>
|
||||
(o.newton8oz && o.newton8oz.trim() !== "") ||
|
||||
(o.newton10oz && o.newton10oz.trim() !== "")
|
||||
)
|
||||
.map((o: any) => ({
|
||||
date: excelDateStuff(o.date, o.time),
|
||||
po:
|
||||
o.newton8oz.replace(/\s+/g, "") !== ""
|
||||
? o.newton8oz.replace(/\s+/g, "")
|
||||
: o.newton10oz.replace(/\s+/g, ""),
|
||||
customerArticlenumber:
|
||||
o.newton8oz != ""
|
||||
? a.filter((a: any) => a.av === 118)[0]
|
||||
.CustomerArticleNumber
|
||||
: a.filter((a: any) => a.av === 120)[0]
|
||||
.CustomerArticleNumber,
|
||||
qty:
|
||||
o.newton8oz != ""
|
||||
? a.filter((a: any) => a.av === 118)[0].totalTruckLoad
|
||||
: a.filter((a: any) => a.av === 120)[0].totalTruckLoad,
|
||||
}));
|
||||
|
||||
// now we want to make sure we only correct orders that or after now
|
||||
correctedOrders = correctedOrders.filter((o: any) => {
|
||||
const parsedDate = parse(o.date, "M/d/yyyy, h:mm:ss a", new Date());
|
||||
return isAfter(o.date, new Date().toISOString());
|
||||
});
|
||||
|
||||
// last map to remove orders that have already been started
|
||||
// correctedOrders = correctedOrders.filter((oo: any) =>
|
||||
// oOrders.some((o: any) => o.CustomerOrderNumber === oo.po)
|
||||
// );
|
||||
let postedOrders: any = [];
|
||||
const filterOrders: any = correctedOrders;
|
||||
filterOrders.forEach((oo: any) => {
|
||||
const isMatch = openOrders.some(
|
||||
(o: any) => String(o.po).trim() === String(oo.po).trim()
|
||||
);
|
||||
if (!isMatch) {
|
||||
//console.log(`ok to update: ${oo.po}`);
|
||||
|
||||
// oo = {
|
||||
// ...oo,
|
||||
// CustomerOrderNumber: oo.CustomerOrderNumber.replace(" ", ""),
|
||||
// };
|
||||
postedOrders.push(oo);
|
||||
} else {
|
||||
// console.log(`Not valid order to update: ${oo.po}`);
|
||||
//console.log(oo)
|
||||
}
|
||||
});
|
||||
|
||||
// Map Excel data to predefinedObject format
|
||||
const orders = filterOrders.map((o: any) => {
|
||||
return {
|
||||
customerId: customerID,
|
||||
invoiceAddressId: invoiceID,
|
||||
customerOrderNo: o.po,
|
||||
orderDate: new Date(Date.now()).toLocaleString("en-US"),
|
||||
positions: [
|
||||
{
|
||||
deliveryAddressId: 8,
|
||||
customerArticleNo: o.customerArticlenumber,
|
||||
quantity: o.qty,
|
||||
deliveryDate: addHours(addDays(o.date, 1), 1), // adding this in so we can over come the constant 1 day behind thing as a work around
|
||||
customerLineItemNo: 1, // this is how it is currently sent over from abbott
|
||||
customerReleaseNo: 1, // same as above
|
||||
},
|
||||
],
|
||||
};
|
||||
});
|
||||
|
||||
// combine it all together.
|
||||
const updatedPredefinedObject = {
|
||||
...predefinedObject,
|
||||
orders: [...predefinedObject.orders, ...orders],
|
||||
};
|
||||
|
||||
//console.log(updatedPredefinedObject);
|
||||
// post the orders to the server
|
||||
const posting = await postOrders(updatedPredefinedObject, user);
|
||||
//console.log(posting);
|
||||
|
||||
return {
|
||||
success: posting?.success,
|
||||
message: posting?.message,
|
||||
data: posting,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,172 @@
|
||||
import XLSX from "xlsx";
|
||||
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
|
||||
import { db } from "../../../../../../../database/dbclient.js";
|
||||
import { settings } from "../../../../../../../database/schema/settings.js";
|
||||
import { query } from "../../../../../sqlServer/prodSqlServer.js";
|
||||
import { orderState } from "../../../../../sqlServer/querys/dm/orderState.js";
|
||||
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
|
||||
import { invoiceAddress } from "../../../../../sqlServer/querys/dm/invoiceAddress.js";
|
||||
import { postOrders } from "../postOrders.js";
|
||||
|
||||
export const energizerOrders = async (data: any, user: any) => {
|
||||
/**
|
||||
* Standard orders meaning that we get the standard file exported and fill it out and uplaod to lst.
|
||||
*/
|
||||
|
||||
const { data: s, error: e } = await tryCatch(db.select().from(settings));
|
||||
|
||||
if (e) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting settings`,
|
||||
data: e,
|
||||
};
|
||||
}
|
||||
|
||||
// order state
|
||||
const { data: o, error: oe } = await tryCatch(
|
||||
query(orderState, "Gets the next 500 orders that have not been started")
|
||||
);
|
||||
|
||||
const openOrders: any = o?.data;
|
||||
|
||||
if (oe) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting article data`,
|
||||
data: oe,
|
||||
};
|
||||
}
|
||||
|
||||
// order state
|
||||
const { data: invoice, error: ie } = await tryCatch(
|
||||
query(invoiceAddress, "Gets invoices addresses")
|
||||
);
|
||||
const i: any = invoice?.data;
|
||||
|
||||
if (ie) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting invoice address data`,
|
||||
data: ie,
|
||||
};
|
||||
}
|
||||
const plantToken = s.filter((s) => s.name === "plantToken");
|
||||
|
||||
const arrayBuffer = await data.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
|
||||
const workbook = XLSX.read(buffer, { type: "buffer" });
|
||||
|
||||
const sheetName = workbook.SheetNames[0];
|
||||
const sheet = workbook.Sheets[sheetName];
|
||||
|
||||
// define custom headers
|
||||
const headers = [
|
||||
"ITEM",
|
||||
"PO",
|
||||
"ReleaseNo",
|
||||
"QTY",
|
||||
"DELDATE",
|
||||
"COMMENTS",
|
||||
"What changed",
|
||||
"CUSTOMERID",
|
||||
"Remark",
|
||||
];
|
||||
const orderData = XLSX.utils.sheet_to_json(sheet, {
|
||||
defval: "",
|
||||
header: headers,
|
||||
range: 1,
|
||||
});
|
||||
|
||||
// the base of the import
|
||||
const predefinedObject = {
|
||||
receivingPlantId: plantToken[0].value,
|
||||
documentName: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
|
||||
"en-US"
|
||||
)}`,
|
||||
sender: user.username || "lst-system",
|
||||
externalRefNo: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
|
||||
"en-US"
|
||||
)}`,
|
||||
orders: [],
|
||||
};
|
||||
|
||||
let newOrders: any = orderData;
|
||||
|
||||
// filter out the orders that have already been started just to reduce the risk of errors.
|
||||
newOrders.filter((oo: any) =>
|
||||
openOrders.some(
|
||||
(o: any) => o.CustomerOrderNumber === oo.CustomerOrderNumber
|
||||
)
|
||||
);
|
||||
|
||||
// filter out the blanks
|
||||
newOrders = newOrders.filter((z: any) => z.ITEM !== "");
|
||||
|
||||
// let postedOrders: any = [];
|
||||
// for (const [customerID, orders] of Object.entries(orderData)) {
|
||||
// // console.log(`Running for Customer ID: ${customerID}`);
|
||||
// const newOrders: any = orderData;
|
||||
|
||||
// // filter out the orders that have already been started just to reduce the risk of errors.
|
||||
// newOrders.filter((oo: any) =>
|
||||
// openOrders.some(
|
||||
// (o: any) => o.CustomerOrderNumber === oo.CustomerOrderNumber
|
||||
// )
|
||||
// );
|
||||
|
||||
// // map everything out for each order
|
||||
const nOrder = newOrders.map((o: any) => {
|
||||
const invoice = i.filter(
|
||||
(i: any) => i.deliveryAddress === parseInt(o.CUSTOMERID)
|
||||
);
|
||||
if (!invoice) {
|
||||
return;
|
||||
}
|
||||
return {
|
||||
customerId: parseInt(o.CUSTOMERID),
|
||||
invoiceAddressId: invoice[0].invoiceAddress, // matched to the default invoice address
|
||||
customerOrderNo: o.PO,
|
||||
orderDate: new Date(Date.now()).toLocaleString("en-US"),
|
||||
positions: [
|
||||
{
|
||||
deliveryAddressId: parseInt(o.CUSTOMERID),
|
||||
customerArticleNo: o.ITEM,
|
||||
quantity: parseInt(o.QTY),
|
||||
deliveryDate: o.DELDATE, //excelDateStuff(o.DELDATE),
|
||||
customerLineItemNo: o.ReleaseNo, // this is how it is currently sent over from abbott
|
||||
customerReleaseNo: o.ReleaseNo, // same as above
|
||||
remark: o.remark === "" ? null : o.remark,
|
||||
},
|
||||
],
|
||||
};
|
||||
});
|
||||
|
||||
// // do that fun combining thing
|
||||
const updatedPredefinedObject = {
|
||||
...predefinedObject,
|
||||
orders: [...predefinedObject.orders, ...nOrder],
|
||||
};
|
||||
|
||||
// //console.log(updatedPredefinedObject);
|
||||
|
||||
// // post the orders to the server
|
||||
const posting: any = await postOrders(updatedPredefinedObject, user);
|
||||
|
||||
return {
|
||||
customer: nOrder[0].CUSTOMERID,
|
||||
//totalOrders: orders?.length(),
|
||||
success: posting.success,
|
||||
message: posting.message,
|
||||
data: posting.data,
|
||||
};
|
||||
// }
|
||||
|
||||
// return {
|
||||
// success: true,
|
||||
// message:
|
||||
// "Standard Template was just processed successfully, please check AlplaProd 2.0 to confirm no errors. ",
|
||||
// data: nOrder,
|
||||
// };
|
||||
};
|
||||
@@ -0,0 +1,200 @@
|
||||
import { delay } from "../../../../../../globalUtils/delay.js";
|
||||
import XLSX from "xlsx";
|
||||
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
|
||||
import { db } from "../../../../../../../database/dbclient.js";
|
||||
import { settings } from "../../../../../../../database/schema/settings.js";
|
||||
import { query } from "../../../../../sqlServer/prodSqlServer.js";
|
||||
import { orderState } from "../../../../../sqlServer/querys/dm/orderState.js";
|
||||
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
|
||||
import { invoiceAddress } from "../../../../../sqlServer/querys/dm/invoiceAddress.js";
|
||||
import { postOrders } from "../postOrders.js";
|
||||
|
||||
export const macroImportOrders = async (data: any, user: any) => {
|
||||
/**
|
||||
* Standard orders meaning that we get the standard file exported and fill it out and uplaod to lst.
|
||||
*/
|
||||
|
||||
const { data: s, error: e } = await tryCatch(db.select().from(settings));
|
||||
|
||||
if (e) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting settings`,
|
||||
data: e,
|
||||
};
|
||||
}
|
||||
|
||||
// order state
|
||||
const { data: o, error: oe } = await tryCatch(
|
||||
query(orderState, "Gets the next 500 orders that have not been started")
|
||||
);
|
||||
|
||||
const openOrders: any = o?.data;
|
||||
|
||||
if (oe) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting article data`,
|
||||
data: oe,
|
||||
};
|
||||
}
|
||||
|
||||
// order state
|
||||
const { data: invoice, error: ie } = await tryCatch(
|
||||
query(invoiceAddress, "Gets invoices addresses")
|
||||
);
|
||||
const i: any = invoice?.data;
|
||||
if (ie) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting invoice address data`,
|
||||
data: ie,
|
||||
};
|
||||
}
|
||||
const plantToken = s.filter((s) => s.name === "plantToken");
|
||||
|
||||
const arrayBuffer = await data.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
|
||||
const workbook = XLSX.read(buffer, { type: "buffer" });
|
||||
|
||||
//const sheetName = workbook.SheetNames[0];
|
||||
const sheet = workbook.Sheets["Data"];
|
||||
|
||||
// define custom headers
|
||||
const headers = [
|
||||
"CustomerArticleNumber",
|
||||
"CustomerOrderNumber",
|
||||
"CustomerLineNumber",
|
||||
"CustomerRealeaseNumber",
|
||||
"Quantity",
|
||||
"DeliveryDate",
|
||||
"CustomerID",
|
||||
"Remark",
|
||||
];
|
||||
const orderData = XLSX.utils.sheet_to_json(sheet, {
|
||||
defval: "",
|
||||
header: headers,
|
||||
range: 5,
|
||||
});
|
||||
|
||||
// the base of the import
|
||||
const predefinedObject = {
|
||||
receivingPlantId: plantToken[0].value,
|
||||
documentName: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
|
||||
"en-US"
|
||||
)}`,
|
||||
sender: user.username || "lst-system",
|
||||
externalRefNo: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
|
||||
"en-US"
|
||||
)}`,
|
||||
orders: [],
|
||||
};
|
||||
|
||||
const removeBlanks = orderData.filter(
|
||||
(n: any) => n.CustomerArticleNumber != ""
|
||||
);
|
||||
|
||||
console.log(removeBlanks);
|
||||
const groupedByCustomer: any = removeBlanks.reduce(
|
||||
(acc: any, item: any) => {
|
||||
const id = item.CustomerID;
|
||||
if (!acc[id]) {
|
||||
acc[id] = [];
|
||||
}
|
||||
acc[id].push(item);
|
||||
return acc;
|
||||
},
|
||||
{}
|
||||
);
|
||||
|
||||
let postedOrders: any = [];
|
||||
for (const [customerID, orders] of Object.entries(groupedByCustomer)) {
|
||||
// console.log(`Running for Customer ID: ${customerID}`);
|
||||
const filterOrders: any = orders;
|
||||
const newOrders: any = [];
|
||||
//newOrders.filter((oo) => openOrders.some((o) => String(o.CustomerOrderNumber) === String(oo.CustomerOrderNumber)));
|
||||
//console.log(newOrders)
|
||||
filterOrders.forEach((oo: any) => {
|
||||
const isMatch = openOrders.some(
|
||||
(o: any) =>
|
||||
// check the header
|
||||
String(o.CustomerOrderNumber).trim() ===
|
||||
String(oo.CustomerOrderNumber).trim() &&
|
||||
// and check the customer release is not in here.
|
||||
String(o.CustomerRealeaseNumber).trim() ===
|
||||
String(oo.CustomerRealeaseNumber).trim()
|
||||
);
|
||||
if (!isMatch) {
|
||||
console.log(`ok to update: ${oo.CustomerOrderNumber}`);
|
||||
|
||||
newOrders.push(oo);
|
||||
} else {
|
||||
console.log(
|
||||
`Not valid order to update: ${oo.CustomerOrderNumber}`
|
||||
);
|
||||
//console.log(oo)
|
||||
}
|
||||
});
|
||||
|
||||
// filter out the orders that have already been started just to reduce the risk of errors.
|
||||
newOrders.filter((oo: any) =>
|
||||
openOrders.some(
|
||||
(o: any) => o.CustomerOrderNumber === oo.CustomerOrderNumber
|
||||
)
|
||||
);
|
||||
|
||||
// map everything out for each order
|
||||
const nOrder = newOrders.map((o: any) => {
|
||||
const invoice = i.filter(
|
||||
(i: any) => i.deliveryAddress === parseInt(customerID)
|
||||
);
|
||||
if (!invoice) {
|
||||
return;
|
||||
}
|
||||
return {
|
||||
customerId: parseInt(customerID),
|
||||
invoiceAddressId: invoice[0]?.invoiceAddress, // matched to the default invoice address
|
||||
customerOrderNo: o.CustomerOrderNumber,
|
||||
orderDate: new Date(Date.now()).toLocaleString("en-US"),
|
||||
positions: [
|
||||
{
|
||||
deliveryAddressId: parseInt(customerID),
|
||||
customerArticleNo: o.CustomerArticleNumber,
|
||||
quantity: parseInt(o.Quantity),
|
||||
deliveryDate: excelDateStuff(o.DeliveryDate),
|
||||
customerLineItemNo: o.CustomerLineNumber, // this is how it is currently sent over from abbott
|
||||
customerReleaseNo: o.CustomerRealeaseNumber, // same as above
|
||||
remark: o.remark === "" ? null : o.remark,
|
||||
},
|
||||
],
|
||||
};
|
||||
});
|
||||
|
||||
// do that fun combining thing
|
||||
const updatedPredefinedObject = {
|
||||
...predefinedObject,
|
||||
orders: [...predefinedObject.orders, ...nOrder],
|
||||
};
|
||||
|
||||
//console.log(updatedPredefinedObject);
|
||||
|
||||
// post the orders to the server
|
||||
const posting: any = await postOrders(updatedPredefinedObject, user);
|
||||
|
||||
postedOrders.push({
|
||||
customer: customerID,
|
||||
//totalOrders: orders?.length(),
|
||||
success: posting.success,
|
||||
message: posting.message,
|
||||
data: posting.data,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message:
|
||||
"Standard Template was just processed successfully, please check AlplaProd 2.0 to confirm no errors. ",
|
||||
data: postedOrders,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,192 @@
|
||||
import { delay } from "../../../../../../globalUtils/delay.js";
|
||||
import XLSX from "xlsx";
|
||||
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
|
||||
import { db } from "../../../../../../../database/dbclient.js";
|
||||
import { settings } from "../../../../../../../database/schema/settings.js";
|
||||
import { query } from "../../../../../sqlServer/prodSqlServer.js";
|
||||
import { orderState } from "../../../../../sqlServer/querys/dm/orderState.js";
|
||||
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
|
||||
import { invoiceAddress } from "../../../../../sqlServer/querys/dm/invoiceAddress.js";
|
||||
import { postOrders } from "../postOrders.js";
|
||||
|
||||
export const standardOrders = async (data: any, user: any) => {
|
||||
/**
|
||||
* Standard orders meaning that we get the standard file exported and fill it out and uplaod to lst.
|
||||
*/
|
||||
|
||||
const { data: s, error: e } = await tryCatch(db.select().from(settings));
|
||||
|
||||
if (e) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting settings`,
|
||||
data: e,
|
||||
};
|
||||
}
|
||||
|
||||
// order state
|
||||
const { data: o, error: oe } = await tryCatch(
|
||||
query(orderState, "Gets the next 500 orders that have not been started")
|
||||
);
|
||||
|
||||
const openOrders: any = o?.data;
|
||||
|
||||
if (oe) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting article data`,
|
||||
data: oe,
|
||||
};
|
||||
}
|
||||
|
||||
// order state
|
||||
const { data: invoice, error: ie } = await tryCatch(
|
||||
query(invoiceAddress, "Gets invoices addresses")
|
||||
);
|
||||
const i: any = invoice?.data;
|
||||
if (ie) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Error getting invoice address data`,
|
||||
data: ie,
|
||||
};
|
||||
}
|
||||
const plantToken = s.filter((s) => s.name === "plantToken");
|
||||
|
||||
const arrayBuffer = await data.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
|
||||
const workbook = XLSX.read(buffer, { type: "buffer" });
|
||||
|
||||
const sheetName = workbook.SheetNames[0];
|
||||
const sheet = workbook.Sheets[sheetName];
|
||||
|
||||
// define custom headers
|
||||
const headers = [
|
||||
"CustomerArticleNumber",
|
||||
"CustomerOrderNumber",
|
||||
"CustomerLineNumber",
|
||||
"CustomerRealeaseNumber",
|
||||
"Quantity",
|
||||
"DeliveryDate",
|
||||
"CustomerID",
|
||||
"Remark",
|
||||
];
|
||||
const orderData = XLSX.utils.sheet_to_json(sheet, {
|
||||
defval: "",
|
||||
header: headers,
|
||||
range: 1,
|
||||
});
|
||||
|
||||
// the base of the import
|
||||
const predefinedObject = {
|
||||
receivingPlantId: plantToken[0].value,
|
||||
documentName: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
|
||||
"en-US"
|
||||
)}`,
|
||||
sender: user.username || "lst-system",
|
||||
externalRefNo: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
|
||||
"en-US"
|
||||
)}`,
|
||||
orders: [],
|
||||
};
|
||||
|
||||
const groupedByCustomer: any = orderData.reduce((acc: any, item: any) => {
|
||||
const id = item.CustomerID;
|
||||
if (!acc[id]) {
|
||||
acc[id] = [];
|
||||
}
|
||||
acc[id].push(item);
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
let postedOrders: any = [];
|
||||
for (const [customerID, orders] of Object.entries(groupedByCustomer)) {
|
||||
// console.log(`Running for Customer ID: ${customerID}`);
|
||||
const filterOrders: any = orders;
|
||||
const newOrders: any = [];
|
||||
//newOrders.filter((oo) => openOrders.some((o) => String(o.CustomerOrderNumber) === String(oo.CustomerOrderNumber)));
|
||||
//console.log(newOrders)
|
||||
filterOrders.forEach((oo: any) => {
|
||||
const isMatch = openOrders.some(
|
||||
(o: any) =>
|
||||
// check the header
|
||||
String(o.CustomerOrderNumber).trim() ===
|
||||
String(oo.CustomerOrderNumber).trim() &&
|
||||
// and check the customer release is not in here.
|
||||
String(o.CustomerRealeaseNumber).trim() ===
|
||||
String(oo.CustomerRealeaseNumber).trim()
|
||||
);
|
||||
if (!isMatch) {
|
||||
console.log(`ok to update: ${oo.CustomerOrderNumber}`);
|
||||
|
||||
newOrders.push(oo);
|
||||
} else {
|
||||
console.log(
|
||||
`Not valid order to update: ${oo.CustomerOrderNumber}`
|
||||
);
|
||||
//console.log(oo)
|
||||
}
|
||||
});
|
||||
|
||||
// filter out the orders that have already been started just to reduce the risk of errors.
|
||||
newOrders.filter((oo: any) =>
|
||||
openOrders.some(
|
||||
(o: any) => o.CustomerOrderNumber === oo.CustomerOrderNumber
|
||||
)
|
||||
);
|
||||
|
||||
// map everything out for each order
|
||||
const nOrder = newOrders.map((o: any) => {
|
||||
const invoice = i.filter(
|
||||
(i: any) => i.deliveryAddress === parseInt(customerID)
|
||||
);
|
||||
if (!invoice) {
|
||||
return;
|
||||
}
|
||||
return {
|
||||
customerId: parseInt(customerID),
|
||||
invoiceAddressId: invoice[0]?.invoiceAddress, // matched to the default invoice address
|
||||
customerOrderNo: o.CustomerOrderNumber,
|
||||
orderDate: new Date(Date.now()).toLocaleString("en-US"),
|
||||
positions: [
|
||||
{
|
||||
deliveryAddressId: parseInt(customerID),
|
||||
customerArticleNo: o.CustomerArticleNumber,
|
||||
quantity: parseInt(o.Quantity),
|
||||
deliveryDate: excelDateStuff(o.DeliveryDate),
|
||||
customerLineItemNo: o.CustomerLineNumber, // this is how it is currently sent over from abbott
|
||||
customerReleaseNo: o.CustomerRealeaseNumber, // same as above
|
||||
remark: o.Remark === "" ? null : o.Remark,
|
||||
},
|
||||
],
|
||||
};
|
||||
});
|
||||
|
||||
// do that fun combining thing
|
||||
const updatedPredefinedObject = {
|
||||
...predefinedObject,
|
||||
orders: [...predefinedObject.orders, ...nOrder],
|
||||
};
|
||||
|
||||
//console.log(updatedPredefinedObject.orders[0]);
|
||||
|
||||
// post the orders to the server
|
||||
const posting: any = await postOrders(updatedPredefinedObject, user);
|
||||
|
||||
postedOrders.push({
|
||||
customer: customerID,
|
||||
//totalOrders: orders?.length(),
|
||||
success: posting.success,
|
||||
message: posting.message,
|
||||
data: posting.data,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message:
|
||||
"Standard Template was just processed successfully, please check AlplaProd 2.0 to confirm no errors. ",
|
||||
data: postedOrders,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,61 @@
|
||||
import { abbottOrders } from "./mappings/abbottTruckList.js";
|
||||
import { energizerOrders } from "./mappings/energizerOrdersIn.js";
|
||||
import { macroImportOrders } from "./mappings/macroImport.js";
|
||||
import { standardOrders } from "./mappings/standardOrders.js";
|
||||
|
||||
export const ordersIn = async (data: any, user: any) => {
|
||||
/**
|
||||
* Bulk orders in, and custom file parsing.
|
||||
*/
|
||||
|
||||
let success = true;
|
||||
let message = "";
|
||||
let orderData: any = [];
|
||||
|
||||
// what type of order are we dealing with?
|
||||
if (data["fileType"] === "standard") {
|
||||
// run the standard orders in
|
||||
const standard = await standardOrders(data["postOrders"], user);
|
||||
success = standard.success ?? false;
|
||||
message = standard.message ?? "Error posting Standard Orders";
|
||||
orderData = standard.data;
|
||||
}
|
||||
|
||||
if (data["fileType"] === "abbott") {
|
||||
// orders in
|
||||
const abbott = await abbottOrders(data["postOrders"], user);
|
||||
success = abbott.success ?? false;
|
||||
message = abbott.message ?? "Error posting Abbott Orders";
|
||||
orderData = abbott.data;
|
||||
}
|
||||
|
||||
if (data["fileType"] === "energizer") {
|
||||
// orders in
|
||||
const energizer = await energizerOrders(data["postOrders"], user);
|
||||
success = energizer.success ?? false;
|
||||
message = energizer.message ?? "Error posting Energizer Orders";
|
||||
orderData = energizer.data;
|
||||
}
|
||||
|
||||
if (data["fileType"] === "loreal") {
|
||||
// orders in
|
||||
}
|
||||
|
||||
if (data["fileType"] === "pg") {
|
||||
// orders in
|
||||
}
|
||||
|
||||
if (data["fileType"] === "macro") {
|
||||
// orders in
|
||||
const macro = await macroImportOrders(data["postOrders"], user);
|
||||
success = macro.success ?? false;
|
||||
message = macro.message ?? "Error posting Macro Orders";
|
||||
orderData = macro.data;
|
||||
}
|
||||
|
||||
return {
|
||||
success,
|
||||
message,
|
||||
data: orderData,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,62 @@
|
||||
import axios from "axios";
|
||||
import { prodEndpointCreation } from "../../../../../globalUtils/createUrl.js";
|
||||
import { createLog } from "../../../../logger/logger.js";
|
||||
|
||||
export const postOrders = async (data: any, user: any) => {
|
||||
let endpoint = await prodEndpointCreation(
|
||||
"/public/v1.0/DemandManagement/ORDERS"
|
||||
);
|
||||
|
||||
try {
|
||||
const results = await axios({
|
||||
url: endpoint,
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-Key": process.env.TEC_API_KEY || "",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
// if a body is sent over it would be like below
|
||||
data: data,
|
||||
});
|
||||
|
||||
//console.log(results.status);
|
||||
if (results.data.errors) {
|
||||
createLog(
|
||||
"error",
|
||||
user.username,
|
||||
"logisitcs",
|
||||
results.data.errors[0].message
|
||||
);
|
||||
return {
|
||||
success: true,
|
||||
message: "Error processing orders",
|
||||
data: results.data.errors[0].message,
|
||||
};
|
||||
}
|
||||
|
||||
if (results.status === 200) {
|
||||
createLog(
|
||||
"info",
|
||||
user.username,
|
||||
"logisitcs",
|
||||
"Orders were processed please check 2.0 for validation and errors"
|
||||
);
|
||||
return {
|
||||
success: true,
|
||||
message: "Success on posting orders",
|
||||
data: data,
|
||||
};
|
||||
}
|
||||
} catch (error: any) {
|
||||
//console.log(`There is an error`, error);
|
||||
if (error) {
|
||||
//console.log(error.response.data);
|
||||
createLog("error", user.username, "logisitcs", error.response.data);
|
||||
return {
|
||||
success: false,
|
||||
message: "There was an error processing data",
|
||||
data: error.response.data,
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,93 @@
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { totalInvNoRn } from "../../../sqlServer/querys/dataMart/totalINV.js";
|
||||
import { invHistoricalData } from "../../../../../database/schema/historicalINV.js";
|
||||
import { format } from "date-fns-tz";
|
||||
import { settings } from "../../../../../database/schema/settings.js";
|
||||
import { sql } from "drizzle-orm";
|
||||
import { createLogisticsJob } from "../../utils/logisticsIntervals.js";
|
||||
|
||||
export const runHistoricalData = async () => {
|
||||
/**
|
||||
* Runs a query at shift change on first shift each day this will be the closest date to the true historical data for blocked, consignment
|
||||
*/
|
||||
|
||||
const { data: set, error: setError } = await tryCatch(
|
||||
db.select().from(settings)
|
||||
);
|
||||
|
||||
if (setError) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"eom",
|
||||
"There was an error getting eom historical inv data."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const timeZone = set.filter((n: any) => n.name === "timezone");
|
||||
|
||||
createLogisticsJob("histInv", `0 6 * * *`, timeZone[0].value, async () => {
|
||||
// remove the lotnumber from the query
|
||||
const updatedQuery = totalInvNoRn.replaceAll(
|
||||
",IdProdPlanung",
|
||||
"--,IdProdPlanung"
|
||||
);
|
||||
|
||||
const { data: inv, error: invError } = await tryCatch(
|
||||
query(updatedQuery, "EOM historical inv")
|
||||
);
|
||||
|
||||
if (invError) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"eom",
|
||||
"There was an error getting eom historical inv data."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* add the inv into the hist table
|
||||
*/
|
||||
|
||||
const setting: any = set;
|
||||
|
||||
for (let i = 0; i < inv?.data.length; i++) {
|
||||
const current = inv?.data[i];
|
||||
const { data, error } = await tryCatch(
|
||||
db.insert(invHistoricalData).values({
|
||||
histDate: format(new Date(), "MM-dd-yyyy"),
|
||||
plantToken: setting.filter(
|
||||
(n: any) => n.name === "plantToken"
|
||||
)[0].value,
|
||||
article: current.av,
|
||||
articleDescription: current.Alias,
|
||||
total_QTY: current.Total_PalletQTY,
|
||||
avaliable_QTY: current.Avaliable_PalletQTY,
|
||||
coa_QTY: current.COA_QTY,
|
||||
held_QTY: current.Held_QTY,
|
||||
consignment: current.Consigment,
|
||||
//location: integer("location"),
|
||||
upd_user: "LST",
|
||||
upd_date: sql`NOW()`,
|
||||
})
|
||||
);
|
||||
|
||||
createLog("info", "lst", "eom", ` historical data was just added.`);
|
||||
|
||||
if (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"eom",
|
||||
`Error addeding historical data, ${error}`
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
@@ -0,0 +1,77 @@
|
||||
import axios from "axios";
|
||||
import { labelData } from "../../../sqlServer/querys/materialHelpers/labelInfo.js";
|
||||
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
|
||||
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { commandLog } from "../../../../../database/schema/commandLog.js";
|
||||
|
||||
type Data = {
|
||||
runningNr: string;
|
||||
lotNum: number;
|
||||
};
|
||||
export const consumeMaterial = async (data: Data) => {
|
||||
const { runningNr, lotNum } = data;
|
||||
// replace the rn
|
||||
|
||||
console.log(data);
|
||||
|
||||
const rnReplace = labelData.replaceAll("[rn]", runningNr);
|
||||
|
||||
let barcode;
|
||||
// get the barcode from the running number
|
||||
try {
|
||||
const r: any = await query(rnReplace, "labelData");
|
||||
barcode = r?.data;
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
createLog("error", "", "logistics", `Error getting barcode: ${error}`);
|
||||
}
|
||||
|
||||
if (barcode.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
message: "The running number you've entered not on stock.",
|
||||
};
|
||||
//throw Error("The provided runningNr is not in stock");
|
||||
}
|
||||
// create the url to post
|
||||
const url = await prodEndpointCreation(
|
||||
"/public/v1.0/IssueMaterial/ConsumeNonPreparedManualMaterial"
|
||||
);
|
||||
|
||||
const consumeSomething = {
|
||||
productionLot: lotNum,
|
||||
barcode: barcode[0]?.barcode,
|
||||
};
|
||||
|
||||
try {
|
||||
const results = await axios.post(url, consumeSomething, {
|
||||
headers: {
|
||||
"X-API-Key": process.env.TEC_API_KEY || "",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
//console.log(results);
|
||||
const { data: commandL, error: ce } = await tryCatch(
|
||||
db.insert(commandLog).values({
|
||||
commandUsed: "consumeMaterial",
|
||||
bodySent: data,
|
||||
})
|
||||
);
|
||||
return {
|
||||
success: true,
|
||||
message: "Material was consumed",
|
||||
status: results.status,
|
||||
};
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
status: 200,
|
||||
message: error.response?.data.errors[0].message,
|
||||
};
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,109 @@
|
||||
import axios from "axios";
|
||||
import { labelData } from "../../../sqlServer/querys/materialHelpers/labelInfo.js";
|
||||
import { laneInfo } from "../../../sqlServer/querys/materialHelpers/laneInfo.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { commandLog } from "../../../../../database/schema/commandLog.js";
|
||||
type Data = {
|
||||
runningNr: string;
|
||||
laneName: string;
|
||||
};
|
||||
export const returnMaterial = async (data: Data, prod: any) => {
|
||||
const { runningNr, laneName } = data;
|
||||
// replace the rn
|
||||
const rnReplace = labelData.replaceAll("[rn]", runningNr);
|
||||
|
||||
// get the lane id by name
|
||||
const laneQuery = laneInfo.replaceAll("[laneName]", laneName);
|
||||
|
||||
let barcode;
|
||||
// get the barcode from the running number
|
||||
try {
|
||||
const r: any = await query(rnReplace, "labelData");
|
||||
barcode = r?.data;
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
createLog(
|
||||
"error",
|
||||
prod.user.username,
|
||||
"logistics",
|
||||
`Error getting barcode: ${error}`
|
||||
);
|
||||
}
|
||||
|
||||
const { data: l, error: laneError } = await tryCatch(
|
||||
query(laneQuery, "laneInfo")
|
||||
);
|
||||
const laneData: any = l?.data;
|
||||
if (laneError) {
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
"The lane you entered is either deactivated or dose not exist.",
|
||||
laneError,
|
||||
};
|
||||
}
|
||||
|
||||
if (!laneData) {
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
"The lane you entered is either deactivated or dose not exist.",
|
||||
};
|
||||
}
|
||||
|
||||
if (laneData.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
"The lane you entered is either deactivated or dose not exist.",
|
||||
};
|
||||
}
|
||||
|
||||
if (barcode.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
message: "The running number you've is not in stock.",
|
||||
};
|
||||
//throw Error("The provided runningNr is not in stock");
|
||||
}
|
||||
// create the url to post
|
||||
const url = await prodEndpointCreation(
|
||||
"/public/v1.0/IssueMaterial/ReturnPartiallyConsumedManualMaterial"
|
||||
);
|
||||
|
||||
const returnSomething = {
|
||||
laneId: laneData[0]?.laneID,
|
||||
barcode: barcode[0]?.barcode,
|
||||
};
|
||||
|
||||
try {
|
||||
const results = await axios.post(url, returnSomething, {
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Basic ${prod.user.prod}`,
|
||||
},
|
||||
});
|
||||
//console.log(results);
|
||||
const { data: commandL, error: ce } = await tryCatch(
|
||||
db.insert(commandLog).values({
|
||||
commandUsed: "returnMaterial",
|
||||
bodySent: data,
|
||||
})
|
||||
);
|
||||
return {
|
||||
success: true,
|
||||
message: "Material was returned",
|
||||
status: results.status,
|
||||
};
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
status: 200,
|
||||
message: error.response?.data.errors[0].message,
|
||||
};
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,154 @@
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { siloQuery } from "../../../sqlServer/querys/silo/siloQuery.js";
|
||||
import { postAdjustment } from "./postAdjustment.js";
|
||||
import { siloAdjustments } from "../../../../../database/schema/siloAdjustments.js";
|
||||
import { greetingStuff } from "../../../../globalUtils/greetingEmail.js";
|
||||
import { sendEmail } from "../../../notifications/controller/sendMail.js";
|
||||
import { settings } from "../../../../../database/schema/settings.js";
|
||||
import { generateOneTimeKey } from "../../../../globalUtils/singleUseKey.js";
|
||||
import { eq } from "drizzle-orm";
|
||||
import {
|
||||
getSettings,
|
||||
serverSettings,
|
||||
} from "../../../server/controller/settings/getSettings.js";
|
||||
|
||||
export const createSiloAdjustment = async (
|
||||
data: any | null,
|
||||
user: any | null
|
||||
) => {
|
||||
/**
|
||||
* Creates a silo adjustment based off warehouse, location, and qty.
|
||||
* qty will come from the hmi, prolink, or silo patrol
|
||||
*/
|
||||
// const { data: set, error: setError } = await tryCatch(
|
||||
// db.select().from(settings)
|
||||
// );
|
||||
|
||||
// const { data: set, error: setError } = await tryCatch(getSettings());
|
||||
|
||||
// if (setError) {
|
||||
// return {
|
||||
// success: false,
|
||||
// message: `There was an error getting setting data to post to the server.`,
|
||||
// data: setError,
|
||||
// };
|
||||
// }
|
||||
|
||||
const set = serverSettings.length === 0 ? [] : serverSettings;
|
||||
// getting stock data first so we have it prior to the adjustment
|
||||
const { data: s, error: stockError } = await tryCatch(
|
||||
query(siloQuery, "Silo data Query")
|
||||
);
|
||||
|
||||
if (stockError) {
|
||||
return {
|
||||
success: false,
|
||||
message: `There was an error getting stock data to post to the server.`,
|
||||
data: stockError,
|
||||
};
|
||||
}
|
||||
const stock: any = s?.data as any;
|
||||
const { data: a, error: errorAdj } = await tryCatch(
|
||||
postAdjustment(data, user.prod)
|
||||
);
|
||||
|
||||
if (errorAdj) {
|
||||
return {
|
||||
success: false,
|
||||
message: `There was an error doing the silo adjustment.`,
|
||||
data: errorAdj,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Checking to see the difference, and send email if +/- 5% will change later if needed
|
||||
*/
|
||||
|
||||
const sa: any = a;
|
||||
|
||||
if (!sa.success) {
|
||||
console.log(`insde error`);
|
||||
return {
|
||||
success: sa.success,
|
||||
message: sa.message,
|
||||
data: sa.data,
|
||||
};
|
||||
}
|
||||
|
||||
const stockNummy = stock.filter((s: any) => s.LocationID === data.laneId);
|
||||
const theDiff =
|
||||
((data.quantity - stockNummy[0].Stock_Total) /
|
||||
((data.quantity + stockNummy[0].Stock_Total) / 2)) *
|
||||
100;
|
||||
|
||||
/**
|
||||
* Post the data to our db.
|
||||
*/
|
||||
|
||||
//console.log(stockNummy);
|
||||
const { data: postAdj, error: postAdjError } = await tryCatch(
|
||||
db
|
||||
.insert(siloAdjustments)
|
||||
.values({
|
||||
warehouseID: data.warehouseId,
|
||||
locationID: data.laneId,
|
||||
currentStockLevel: stockNummy[0].Stock_Total,
|
||||
newLevel: data.quantity,
|
||||
lastDateAdjusted: new Date(stockNummy[0].LastAdjustment),
|
||||
add_user: user.username,
|
||||
})
|
||||
.returning({ id: siloAdjustments.siloAdjust_id })
|
||||
);
|
||||
|
||||
if (postAdjError) {
|
||||
//console.log(postAdjError);
|
||||
return {
|
||||
success: false,
|
||||
message: `There was an error posting the new adjustment.`,
|
||||
data: postAdjError,
|
||||
};
|
||||
}
|
||||
let adj: any = a;
|
||||
if (Math.abs(theDiff) > 5) {
|
||||
// console.log(`Send for comment due to being: ${theDiff.toFixed(2)}%`);
|
||||
const server = set.filter((n: any) => n.name === "server");
|
||||
|
||||
const port = set.filter((n: any) => n.name === "serverPort");
|
||||
const key = await generateOneTimeKey();
|
||||
const updateKey = await db
|
||||
.update(siloAdjustments)
|
||||
.set({ commentKey: key })
|
||||
.where(eq(siloAdjustments.siloAdjust_id, postAdj[0].id));
|
||||
|
||||
const emailSetup = {
|
||||
email: user.email,
|
||||
subject: `Alert - Siloadjustment was done with a descrepancy of 5% or greater`,
|
||||
template: "siloAdjustmentComment",
|
||||
context: {
|
||||
greeting: await greetingStuff(),
|
||||
siloName: stockNummy[0].Description,
|
||||
variance: `${theDiff.toFixed(2)}%`,
|
||||
currentLevel: stockNummy[0].Stock_Total,
|
||||
newLevel: data.quantity,
|
||||
variancePer: 5,
|
||||
adjustID: `${postAdj[0].id}&${key}`,
|
||||
server: server[0].value,
|
||||
port: port[0].value,
|
||||
},
|
||||
};
|
||||
|
||||
//console.log(emailSetup);
|
||||
|
||||
await sendEmail(emailSetup);
|
||||
return {
|
||||
success: adj.success,
|
||||
message: `Silo adjustmnet was completed you will also receive and email due to the adjustment having a variation of ${Math.abs(
|
||||
theDiff
|
||||
).toFixed(2)}%`,
|
||||
data: adj.data,
|
||||
};
|
||||
} else {
|
||||
return { success: adj.success, message: adj.message, data: adj.data };
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,24 @@
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { siloQuery } from "../../../sqlServer/querys/silo/siloQuery.js";
|
||||
|
||||
export const getStockSiloData = async () => {
|
||||
/**
|
||||
* will return the current stock info where the silo is checked
|
||||
*/
|
||||
|
||||
const { data, error } = await tryCatch(query(siloQuery, "Get silo data"));
|
||||
|
||||
if (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: "There was a n error getting the silo data.",
|
||||
};
|
||||
}
|
||||
const stockData: any = data?.data;
|
||||
return {
|
||||
success: true,
|
||||
message: "Current silo data from alplastock.",
|
||||
data: stockData,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,79 @@
|
||||
import { between, desc, gte, lte } from "drizzle-orm";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { siloAdjustments } from "../../../../../database/schema/siloAdjustments.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
|
||||
export const getSiloAdjustments = async (startDate: any, endDate: any) => {
|
||||
/**
|
||||
* Returns silo adjustments by date or all
|
||||
*/
|
||||
|
||||
if (startDate && endDate) {
|
||||
const { data: adjRange, error: adjRangeError } = await tryCatch(
|
||||
db
|
||||
.select()
|
||||
.from(siloAdjustments)
|
||||
.where(
|
||||
between(
|
||||
siloAdjustments.dateAdjusted,
|
||||
new Date(startDate),
|
||||
new Date(endDate)
|
||||
)
|
||||
)
|
||||
.orderBy(desc(siloAdjustments.dateAdjusted))
|
||||
);
|
||||
|
||||
if (adjRangeError) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Error getting silo adjustments.",
|
||||
adjRangeError,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Silo adjustment data.",
|
||||
data: adjRange,
|
||||
};
|
||||
}
|
||||
|
||||
if (startDate) {
|
||||
const { data: adjRange, error: adjRangeError } = await tryCatch(
|
||||
db
|
||||
.select()
|
||||
.from(siloAdjustments)
|
||||
.where(gte(siloAdjustments.dateAdjusted, new Date(startDate)))
|
||||
.orderBy(desc(siloAdjustments.dateAdjusted))
|
||||
);
|
||||
if (adjRangeError)
|
||||
return {
|
||||
success: false,
|
||||
message: "Error getting silo adjustments.",
|
||||
adjRangeError,
|
||||
};
|
||||
return {
|
||||
success: true,
|
||||
message: "Silo adjustment data.",
|
||||
data: adjRange,
|
||||
};
|
||||
}
|
||||
|
||||
const { data: adjRange, error: adjRangeError } = await tryCatch(
|
||||
db
|
||||
.select()
|
||||
.from(siloAdjustments)
|
||||
.orderBy(desc(siloAdjustments.dateAdjusted))
|
||||
);
|
||||
if (adjRangeError)
|
||||
return {
|
||||
success: false,
|
||||
message: "Error getting silo adjustments.",
|
||||
adjRangeError,
|
||||
};
|
||||
return {
|
||||
success: true,
|
||||
message: "Silo adjustment data.",
|
||||
data: adjRange,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,96 @@
|
||||
/**
|
||||
* 1. Get the silo adjustments from lstv1
|
||||
* 2. Build the new data set to match the new system
|
||||
* 3. insert the new values
|
||||
*/
|
||||
|
||||
import axios from "axios";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { siloAdjustments } from "../../../../../database/schema/siloAdjustments.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { delay } from "../../../../globalUtils/delay.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { settings } from "../../../../../database/schema/settings.js";
|
||||
import { eq } from "drizzle-orm";
|
||||
import {
|
||||
getSettings,
|
||||
serverSettings,
|
||||
} from "../../../server/controller/settings/getSettings.js";
|
||||
|
||||
export const migrateAdjustments = async () => {
|
||||
/**
|
||||
* Migrates the silo adjustments from v1 to v2
|
||||
*/
|
||||
|
||||
//const { data, error } = await tryCatch(db.select().from(settings));
|
||||
// const { data, error } = await tryCatch(getSettings());
|
||||
|
||||
// if (error) {
|
||||
// createLog("error", "silo", "logistics", "Getting settings.");
|
||||
// return;
|
||||
// }
|
||||
|
||||
const data = serverSettings.length === 0 ? [] : serverSettings;
|
||||
|
||||
const migrationCompleted = data?.filter(
|
||||
(n) => n.name === "siloAdjMigrations"
|
||||
);
|
||||
const server = data?.filter((n) => n.name === "v1SysServer");
|
||||
const port = data?.filter((n) => n.name === "v1SysPort");
|
||||
createLog("info", "silo", "logistics", "Getting v1 silo data.");
|
||||
|
||||
if (migrationCompleted[0]?.value === "1") {
|
||||
createLog(
|
||||
"info",
|
||||
"silo",
|
||||
"logistics",
|
||||
"Migrations have already been completed on this server."
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
const { data: s, error: siloError } = await tryCatch(
|
||||
axios.get(
|
||||
`http://${server[0]?.value}:${port[0]?.value}/api/v1/warehouse/getSilosAdjustment?startDate=1/1/2020&endDate=4/1/2026`
|
||||
)
|
||||
);
|
||||
|
||||
if (siloError) {
|
||||
createLog("error", "silo", "logistics", "Getting settings.");
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrate all the silo adjustments :D
|
||||
*/
|
||||
const silo: any = s?.data.data;
|
||||
createLog("info", "silo", "logistics", "Starting migration.");
|
||||
for (let i = 0; i < silo.length; i++) {
|
||||
const migrate = await db.insert(siloAdjustments).values({
|
||||
warehouseID: silo[i].warehouseID,
|
||||
locationID: silo[i].locationID,
|
||||
currentStockLevel: silo[i].currentStockLevel,
|
||||
newLevel: silo[i].newLevel,
|
||||
dateAdjusted: new Date(silo[i].dateAdjusted),
|
||||
lastDateAdjusted: new Date(silo[i].lastDateAdjusted),
|
||||
add_user: silo[i].add_user,
|
||||
});
|
||||
createLog(
|
||||
"info",
|
||||
"silo",
|
||||
"logistics",
|
||||
`Migrations for Date ${silo[0].dateAdjusted} on silo: ${silo[0].locationID}`
|
||||
);
|
||||
await delay(120);
|
||||
}
|
||||
|
||||
/**
|
||||
* change the migration setting to be completed
|
||||
*/
|
||||
|
||||
await db
|
||||
.update(settings)
|
||||
.set({ value: "1" })
|
||||
.where(eq(settings.name, "siloAdjMigrations"));
|
||||
createLog("info", "silo", "logistics", "Migration completed.");
|
||||
};
|
||||
@@ -0,0 +1,100 @@
|
||||
import axios from "axios";
|
||||
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
|
||||
export const postAdjustment = async (data: any, prod: any) => {
|
||||
if (data.warehouseId === undefined) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Missing mandatory field: warehouseID`,
|
||||
data: { error: `Missing mandatory field: warehouseID` },
|
||||
};
|
||||
}
|
||||
|
||||
if (data.laneId === undefined) {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `Missing mandatory field: locationID`,
|
||||
data: { error: `Missing mandatory field: locationID` },
|
||||
};
|
||||
}
|
||||
|
||||
if (data.quantity == "0") {
|
||||
return {
|
||||
sucess: false,
|
||||
message: `You entered 0 for the quantity to post, quantity needs to be at leave 1`,
|
||||
data: {
|
||||
error: `You entered 0 for the quantity to post, quantity needs to be at leave 1`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const siloAdjustment = {
|
||||
warehouseId: data.warehouseId,
|
||||
laneId: data.laneId,
|
||||
quantity: data.quantity,
|
||||
};
|
||||
|
||||
let url = await prodEndpointCreation(
|
||||
"/public/v1.0/Warehousing/AdjustSiloStockLevel"
|
||||
);
|
||||
|
||||
const { data: silo, error } = await tryCatch(
|
||||
axios.post(url, siloAdjustment, {
|
||||
headers: {
|
||||
"X-API-Key": process.env.TEC_API_KEY || "",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
})
|
||||
);
|
||||
let e = error as any;
|
||||
if (e) {
|
||||
console.log(e.response);
|
||||
if (e.status === 401) {
|
||||
const data = {
|
||||
success: false,
|
||||
message: `There was error posting the data: ${JSON.stringify(
|
||||
e.response?.data
|
||||
)}`,
|
||||
data: {
|
||||
status: e.response?.status,
|
||||
statusText: e.response?.statusText,
|
||||
data: e.response?.data,
|
||||
},
|
||||
};
|
||||
return data;
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
message: "Error in posting the silo adjustment.",
|
||||
data: {
|
||||
status: e.response?.status,
|
||||
statusText: e.response?.statusText,
|
||||
data: e.response?.data,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (silo?.status !== 200) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Error in posting the silo adjustment",
|
||||
data: {
|
||||
status: silo?.status,
|
||||
statusText: silo?.statusText,
|
||||
data: silo?.data,
|
||||
},
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
success: true,
|
||||
message: "Adjustment was completed",
|
||||
data: {
|
||||
status: silo.status,
|
||||
statusText: silo.statusText,
|
||||
data: silo.data,
|
||||
},
|
||||
};
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,64 @@
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { siloAdjustments } from "../../../../../database/schema/siloAdjustments.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
|
||||
export const postSiloComment = async (
|
||||
id: string,
|
||||
comment: string,
|
||||
commentk: string,
|
||||
user: any
|
||||
) => {
|
||||
/**
|
||||
* We will add the comment to the silo adjustment so we know the why we had this.
|
||||
*/
|
||||
|
||||
// make sure we havea valid key
|
||||
const { data: key, error: keyErro } = await tryCatch(
|
||||
db
|
||||
.select()
|
||||
.from(siloAdjustments)
|
||||
.where(eq(siloAdjustments.siloAdjust_id, id))
|
||||
);
|
||||
|
||||
if (keyErro) {
|
||||
return {
|
||||
success: false,
|
||||
message: "There was an error getting the adjustment.",
|
||||
data: keyErro,
|
||||
};
|
||||
}
|
||||
|
||||
if (key[0].commentKey != commentk) {
|
||||
return {
|
||||
success: false,
|
||||
message: "The key you provided is invalid.",
|
||||
data: keyErro,
|
||||
};
|
||||
}
|
||||
|
||||
const { data, error } = await tryCatch(
|
||||
db
|
||||
.update(siloAdjustments)
|
||||
.set({
|
||||
comment: comment,
|
||||
commentAddedBy: user.username,
|
||||
commentDate: sql`NOW()`,
|
||||
commentKey: null,
|
||||
})
|
||||
.where(eq(siloAdjustments.siloAdjust_id, id))
|
||||
);
|
||||
|
||||
if (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: "There was an error adding the comment.",
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Comment was successfully added.",
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,43 @@
|
||||
import { runProdApi } from "../../../../globalUtils/runProdApi.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
|
||||
export const attachSilo = async (data: any) => {
|
||||
/**
|
||||
* Detachs a silo
|
||||
*/
|
||||
|
||||
const detachData = {
|
||||
endpoint: "/public/v1.0/IssueMaterial/AssignSiloToMachine",
|
||||
data: [
|
||||
{
|
||||
laneId: data.laneId,
|
||||
machineId: data.machineId,
|
||||
productionLotId: data.productionLotId,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const { data: d, error } = await tryCatch(runProdApi(detachData));
|
||||
|
||||
if (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Error processing attachingSilo data",
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
|
||||
if (!d.success) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Error processing silo attach data",
|
||||
data: d.message,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "silo attach was completed",
|
||||
data: d.data,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,42 @@
|
||||
import { runProdApi } from "../../../../globalUtils/runProdApi.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
|
||||
export const detachSilo = async (data: any) => {
|
||||
/**
|
||||
* Detachs a silo
|
||||
*/
|
||||
|
||||
const detachData = {
|
||||
endpoint: "/public/v1.0/IssueMaterial/DetachSiloFromMachine",
|
||||
data: [
|
||||
{
|
||||
laneId: data.laneId,
|
||||
machineId: data.machineId,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const { data: d, error } = await tryCatch(runProdApi(detachData));
|
||||
|
||||
if (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Error processing detach data",
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
|
||||
if (!d.success) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Error processing detach data",
|
||||
data: d.message,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Detach was completed",
|
||||
data: d.data,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,63 @@
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import {
|
||||
connectedToMachine,
|
||||
notconnectedToMachine,
|
||||
} from "../../../sqlServer/querys/silo/connectionCheck.js";
|
||||
|
||||
// Request payload for siloConnectionType: the silo to inspect and whether
// to list machines "connected" to it or not connected.
type Data = {
  siloID: string;
  connectionType: string;
};
|
||||
export const siloConnectionType = async (data: Data) => {
|
||||
/**
|
||||
* Will return the machines that are attached or detached based on the silo and connection type
|
||||
*/
|
||||
|
||||
if (!data) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Missing mandatory data",
|
||||
data: [{ error: "Missing siloId or ConnectionType" }],
|
||||
};
|
||||
}
|
||||
|
||||
// change the silo id to the correct one
|
||||
let newQuery = "";
|
||||
|
||||
if (data.connectionType === "connected") {
|
||||
newQuery = connectedToMachine.replace("[siloID]", data.siloID);
|
||||
} else {
|
||||
newQuery = notconnectedToMachine.replace("[siloID]", data.siloID);
|
||||
}
|
||||
|
||||
/**
|
||||
* get the silo data
|
||||
*/
|
||||
const { data: s, error } = (await tryCatch(
|
||||
query(newQuery, "Silo connection check")
|
||||
)) as any;
|
||||
|
||||
if (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"logistics",
|
||||
`There was an error getting the silo connection data: ${JSON.stringify(
|
||||
error
|
||||
)}`
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message: "There was an error getting the silo connection data.",
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `silo ${data.connectionType} data`,
|
||||
data: s.data,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,24 @@
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { activeWarehouseLanes } from "../../../sqlServer/querys/warehouse/activeWarehouseLanes.js";
|
||||
|
||||
export const getActiveWarehouseLanes = async () => {
|
||||
const { data, error } = await tryCatch(
|
||||
query(activeWarehouseLanes, "Get active warehouse lanes")
|
||||
);
|
||||
|
||||
if (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Error getting active warehouse lanes",
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
const lanes: any = data as any;
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Current active warehouse lanes.",
|
||||
data: lanes.data,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,73 @@
|
||||
import { differenceInDays, differenceInSeconds, format } from "date-fns";
|
||||
import { timeZoneFix } from "../../../../../globalUtils/timeZoneFix.js";
|
||||
import { createLog } from "../../../../logger/logger.js";
|
||||
import { delay } from "../../../../../globalUtils/delay.js";
|
||||
import { tryCatch } from "../../../../../globalUtils/tryCatch.js";
|
||||
import { query } from "../../../../sqlServer/prodSqlServer.js";
|
||||
import { cycleCountCheck } from "../../../../sqlServer/querys/warehouse/cycleCountCheck.js";
|
||||
|
||||
// setting timer for updating stockCheck on a restart will always check.
|
||||
let lastCheck = 0;
|
||||
|
||||
export let lanes: any = [];
|
||||
|
||||
export const getLanesToCycleCount = async () => {
|
||||
const currentTime: any = timeZoneFix();
|
||||
// store the lanes in memeory
|
||||
createLog("info", "warehouse", "logistics", "Lane triggered update.");
|
||||
lastCheck = currentTime;
|
||||
const ageQuery = cycleCountCheck.replaceAll("[ageOfRow]", "90");
|
||||
const { data: p, error: pl } = await tryCatch(
|
||||
query(ageQuery, "Get Stock lane date.")
|
||||
);
|
||||
|
||||
if (pl) {
|
||||
createLog(
|
||||
"error",
|
||||
"warehouse",
|
||||
"logistics",
|
||||
`There was an error getting lanes: ${pl}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const prodLanes: any = p?.data;
|
||||
|
||||
// run the update on the lanes
|
||||
for (let i = 0; i < prodLanes.length; i++) {
|
||||
const createLane = {
|
||||
laneID: prodLanes[i]?.laneID,
|
||||
warehouseID: prodLanes[i]?.warehouseID,
|
||||
warehouseName: prodLanes[i]?.warehouseName || "na",
|
||||
Description: prodLanes[i]?.Description,
|
||||
LastMoveDate: prodLanes[i]?.LastMoveDate
|
||||
? format(prodLanes[i]?.LastMoveDate, "M/d/yyyy")
|
||||
: undefined,
|
||||
LastInv: format(prodLanes[i]?.LastInv, "M/d/yyyy"),
|
||||
rowType: prodLanes[i].rowType,
|
||||
DaysSinceLast: differenceInDays(
|
||||
new Date(Date.now()),
|
||||
new Date(prodLanes[i].LastInv)
|
||||
),
|
||||
upd_date: format(new Date(Date.now()), "M/d/yyyy"),
|
||||
};
|
||||
|
||||
const existing = lanes.filter(
|
||||
(l: any) => l.laneID === prodLanes[i]?.laneID
|
||||
);
|
||||
|
||||
if (existing) {
|
||||
lanes = lanes.filter((l: any) => l.laneID !== prodLanes[i]?.laneID);
|
||||
}
|
||||
|
||||
lanes.push(createLane);
|
||||
createLog(
|
||||
"debug",
|
||||
"warehouse",
|
||||
"logistics",
|
||||
`${lanes[i].Description} was just added`
|
||||
);
|
||||
await delay(10);
|
||||
//delay to slow this thing down
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,42 @@
|
||||
import * as XLSX from "xlsx";
|
||||
import { lanes } from "./cyclecountCheck.js";
|
||||
|
||||
export const lanesToExcel = async (age: string | null) => {
|
||||
// Convert JSON data to an array of arrays (AOA)
|
||||
|
||||
let processLanes = lanes;
|
||||
if (age) {
|
||||
processLanes = lanes.filter(
|
||||
(l: any) => l.DaysSinceLast >= parseInt(age)
|
||||
);
|
||||
}
|
||||
|
||||
const headers = Object.keys(processLanes[0]); // Get headers from JSON keys
|
||||
const data = processLanes.map((item: any) =>
|
||||
headers.map((header) => item[header])
|
||||
);
|
||||
|
||||
// Create the workbook and worksheet
|
||||
const wb = XLSX.utils.book_new();
|
||||
const ws = XLSX.utils.aoa_to_sheet([headers, ...data]); // Combine headers and data
|
||||
|
||||
// Auto-resize columns based on the longest content in each column
|
||||
const colWidths = headers.map((_, colIndex) => {
|
||||
let maxLength = 0;
|
||||
data.forEach((row: any) => {
|
||||
const cellValue = row[colIndex] ? row[colIndex].toString() : "";
|
||||
maxLength = Math.max(maxLength, cellValue.length);
|
||||
});
|
||||
return { wch: maxLength + 2 }; // Add a little padding
|
||||
});
|
||||
|
||||
ws["!cols"] = colWidths; // Set the column widths
|
||||
|
||||
// Add the worksheet to the workbook
|
||||
XLSX.utils.book_append_sheet(wb, ws, "CycleCount");
|
||||
|
||||
// Write the workbook to a buffer and return it
|
||||
const excelBuffer = XLSX.write(wb, { bookType: "xlsx", type: "buffer" });
|
||||
|
||||
return excelBuffer;
|
||||
};
|
||||
@@ -0,0 +1,41 @@
|
||||
import { lanes } from "./cyclecountCheck.js";
|
||||
|
||||
export const getCycleCountCheck = async (age: number = 1000, type: any) => {
|
||||
/**
|
||||
* Get the lane data based on the age and type
|
||||
*/
|
||||
|
||||
let filteredLanes = lanes;
|
||||
|
||||
if (type === "empty") {
|
||||
let empty = lanes.filter((t: any) => t.rowType === type.toUpperCase());
|
||||
|
||||
return {
|
||||
sucess: true,
|
||||
message: `${empty.length} lanes that are of type ${type}.`,
|
||||
data: empty.sort(
|
||||
(a: any, b: any) => b.DaysSinceLast - a.DaysSinceLast
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (type != "") {
|
||||
let noType = lanes.filter((t: any) => t.DaysSinceLast >= age);
|
||||
|
||||
return {
|
||||
sucess: true,
|
||||
message: `${noType.length} lanes that are of type ${type} and have not been cycle counted in the last ${age} days.`,
|
||||
data: noType
|
||||
.filter((t: any) => t.rowType === type?.toUpperCase())
|
||||
.sort((a: any, b: any) => b.DaysSinceLast - a.DaysSinceLast),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `${filteredLanes.length} lanes grabed that have not been cycle counted in the last ${age} days.`,
|
||||
data: filteredLanes.sort(
|
||||
(a: any, b: any) => b.DaysSinceLast - a.DaysSinceLast
|
||||
),
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,18 @@
|
||||
import { tryCatch } from "../../../../../globalUtils/tryCatch.js";
|
||||
import { query } from "../../../../sqlServer/prodSqlServer.js";
|
||||
import { ppooQuery } from "../../../../sqlServer/querys/warehouse/ppooQuery.js";
|
||||
|
||||
export const getPPOO = async () => {
|
||||
const { data, error } = await tryCatch(query(ppooQuery, "Get PPOO"));
|
||||
|
||||
const ppoo: any = data as any;
|
||||
if (error) {
|
||||
return { success: false, message: "Error getting ppoo", data: error };
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Current pallets in PPOO.",
|
||||
data: ppoo.data,
|
||||
};
|
||||
};
|
||||
Reference in New Issue
Block a user