refactor(psi): refactored planning-number calculation to handle a bad downsync that caused negative numbers

This commit is contained in:
2026-01-08 20:08:31 -06:00
parent b15d0d7322
commit 124fde07e0
5 changed files with 463 additions and 266 deletions

View File

@@ -0,0 +1,22 @@
meta {
name: PSI - planning data
type: http
seq: 2
}
get {
url: {{url}}/lst/old/api/datamart/psiplanningdata?avs=118,120&startDate=12/1/2025&endDate=12/31/2026
body: none
auth: inherit
}
params:query {
avs: 118,120
startDate: 12/1/2025
endDate: 12/31/2026
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -1,5 +1,5 @@
vars { vars {
url: https://bow1prod.alpla.net url: http://localhost:4200
session_cookie: session_cookie:
urlv2: http://usbow1vms006:3000 urlv2: http://usbow1vms006:3000
jwtV2: jwtV2:

View File

@@ -2,62 +2,71 @@ import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js"; import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js"; import { query } from "../../sqlServer/prodSqlServer.js";
import { planningNumbersByAVDate } from "../../sqlServer/querys/psiReport/planningNumbersByAv.js"; import { planningNumbersByAVDate } from "../../sqlServer/querys/psiReport/planningNumbersByAv.js";
import { improvedPsiPlanningInfo } from "./psiPlanningDataImproved.js";
// type ArticleData = { // type ArticleData = {
// id: string // id: string
// } // }
export const psiGetPlanningData = async ( export const psiGetPlanningData = async (
avs: string, avs: string,
startDate: string, startDate: string,
endDate: string endDate: string,
) => { ) => {
let articles: any = []; let articles: any = [];
if (!avs) { if (!avs) {
return { return {
success: false, success: false,
message: `Missing av's please send at least one over`, message: `Missing av's please send at least one over`,
data: [], data: [],
}; };
} }
const { data, error } = (await tryCatch( const { data, error } = (await tryCatch(
query( query(
planningNumbersByAVDate planningNumbersByAVDate
.replace("[articles]", avs) .replace("[articles]", avs)
.replace("[startDate]", startDate) .replace("[startDate]", startDate)
.replace("[endDate]", endDate), .replace("[endDate]", endDate),
"PSI planning info" "PSI planning info",
) ),
)) as any; )) as any;
if (error) { // improvedPsiPlanningInfo({
createLog( // avs,
"error", // startDate,
"datamart", // endDate,
"datamart", // });
`There was an error getting the planning info: ${JSON.stringify( if (error) {
error createLog(
)}` "error",
); "datamart",
return { "datamart",
success: false, `There was an error getting the planning info: ${JSON.stringify(error)}`,
messsage: `There was an error getting the planning info`, );
data: error, return {
}; success: false,
} messsage: `There was an error getting the planning info`,
data: error,
};
}
articles = data.data; articles = data.data;
return { return {
success: true, success: true,
message: "PSI planning Data", message: "PSI planning Data",
data: articles.map((n: any) => { data: await improvedPsiPlanningInfo({
if (n.PalDay) { avs,
return { ...n, PalDay: n.PalDay.toFixed(2) }; startDate,
} endDate,
}),
// data: articles.map((n: any) => {
// if (n.PalDay) {
// return { ...n, PalDay: n.PalDay.toFixed(2) };
// }
return n; // return n;
}), // }),
}; };
}; };

View File

@@ -0,0 +1,170 @@
import { format } from "date-fns-tz";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
const improvedQuery = `
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
SELECT
[RunningNumber] as lot
,[ProfitCentreDescription]
,[MachineDescription]
,[ArticleHumanReadableId]
,[ArticleDescription]
,[DeliveryAddressHumanReadableId]
,[DeliveryAddressDescription]
,[MouldHumanReadableId]
,[BlowheadHumanReadableId1]
,[PackagingInstructionHumanReadableId]
,[PackagingInstructionDescription]
,[MainMaterialHumanReadableId]
,[MainMaterialDescription]
,[CompoundHumanReadableId]
,[CompoundDescription]
,[ProductionLotState]
,[PlanType]
,[ProducedQuantityLoadingUnit]
,[ProducedQuantityPieces]
,[PlanStart]
,[PlanEnd]
,[ProdStart]
,[TheoreticEnd]
,[ProdDuration]
,[SetupDuration]
,[StartupDuration]
,[NetEquipmentEfficiency]
,[UtilisationDuration]
,[CycleTime]
,[Cavities]
,[FixedQuantity]
,[ProducedQuantityTrucks]
,[ProducedQuantityTradeUnit]
,[MaxRegrind]
,[Conflict]
,[ProductionOrderHumanReadableId]
,[ProductionDataImportSource]
,[Remark]
,[BlowheadDescription1]
,[MouldDescription]
,[ProcessLossPercentage]
,[SetupTypeNumberOfPersons]
,[UnplannedDowntimePercentage]
,[PlanQuantityLoadingUnit]
,[PlanQuantityPieces]
,[PlanQuantityTradeUnit]
,[PlanQuantityTrucks]
,[PublishState]
,[LastChange]
,[MaterialConsumed]
,[MaterialStaged]
,[MachineLocation]
,[HasPrioritization]
,[ArticleAlias]
FROM [test1_AlplaPROD2.0_Read].[productionScheduling].[ProductionLot] with (nolock)
where TheoreticEnd between @StartDate and @EndDate
and ArticleHumanReadableId in ([articles])
and PublishState = 1
order by PlanStart
`;
export const improvedPsiPlanningInfo = async (something: any) => {
const { data, error } = (await tryCatch(
query(
improvedQuery
.replace("[articles]", something.avs)
.replace("[startDate]", something.startDate)
.replace("[endDate]", something.endDate),
"PSI planning info",
),
)) as any;
// add error handling in later here
return splitProduction(data.data);
};
const splitProduction = (runs: any) => {
const results: any = [];
const WORKDAY_START_HOUR = 7; // 07:00 start well later get this from the shift def
runs.forEach((e: any) => {
const {
PlanStart,
PlanEnd,
PlanQuantityPieces,
ArticleHumanReadableId,
ProdDuration,
} = e;
const prodStart: any = new Date(PlanStart);
const prodEnd: any = new Date(PlanEnd);
const prodDuration = ProdDuration
? ProdDuration * 60 * 60 * 1000
: prodEnd - prodStart;
// get the prod date the production falls under
function getProdDayStart(date: Date) {
const d = new Date(date);
d.setHours(WORKDAY_START_HOUR, 0, 0, 0);
if (date.getHours() < WORKDAY_START_HOUR) {
// before 07:00, belongs to previous calendar day
d.setDate(d.getDate() - 1);
}
return d;
}
// current pointer starts at the work-day start that contains our start time
let currentStart = new Date(prodStart);
let prodDayStart = getProdDayStart(currentStart);
while (prodDayStart < prodEnd) {
// 1⃣ The next days start = prodDayStart + 1 day at 07:00
const nextProdDayStart = new Date(prodDayStart);
nextProdDayStart.setDate(nextProdDayStart.getDate() + 1);
// 2⃣ Segment end is either the next work-day start or the actual end, whichever is sooner
const segmentEnd = new Date(
Math.min(nextProdDayStart.getTime(), prodEnd.getTime()),
);
// 3⃣ Determine overlap window within (startTime..endTime)
const segStart: any = new Date(
Math.max(prodDayStart.getTime(), prodStart.getTime()),
);
const segEnd: any = segmentEnd;
if (segEnd > segStart) {
const segMs = segEnd - segStart;
const proportion = segMs / prodDuration;
const qty = PlanQuantityPieces * proportion;
const pal = e.PlanQuantityLoadingUnit * proportion;
results.push({
Article: ArticleHumanReadableId,
Description: e.ArticleAlias,
MachineId: e.MachineLocation,
MachineName: e.MachineDescription,
LotNumber: e.lot,
ProductionDay: format(prodDayStart, "M/d/yyyy"),
TotalPlanned: e.PlanQuantityPieces,
// PlanEnd,
// TheoreticEnd,
QTYPerDay: parseInt(qty.toFixed(0)),
PalDay: parseFloat(pal.toFixed(2)),
finished: e.ProductionLotState === 3 ? 1 : 0,
//prodDuration,
});
}
// move to next production-day window
prodDayStart = nextProdDayStart;
}
});
return results;
};

View File

@@ -1,252 +1,248 @@
import { eq } from "drizzle-orm";
import sql from "mssql"; import sql from "mssql";
import { prodSqlConfig } from "./utils/prodServerConfig.js";
import { createLog } from "../logger/logger.js";
import { db } from "../../../database/dbclient.js"; import { db } from "../../../database/dbclient.js";
import { settings } from "../../../database/schema/settings.js"; import { settings } from "../../../database/schema/settings.js";
import { eq } from "drizzle-orm";
import { installed } from "../../index.js";
import { checkHostnamePort } from "../../globalUtils/pingServer.js"; import { checkHostnamePort } from "../../globalUtils/pingServer.js";
import { installed } from "../../index.js";
import { createLog } from "../logger/logger.js";
import { serverSettings } from "../server/controller/settings/getSettings.js"; import { serverSettings } from "../server/controller/settings/getSettings.js";
import { prodSqlConfig } from "./utils/prodServerConfig.js";
let pool: any; let pool: any;
let connected: boolean = false; let connected: boolean = false;
export const initializeProdPool = async () => { export const initializeProdPool = async () => {
if (!installed) { if (!installed) {
createLog( createLog(
"info", "info",
"lst", "lst",
"sqlProd", "sqlProd",
"The server was not installed will reconnect in 5 seconds" "The server was not installed will reconnect in 5 seconds",
); );
setTimeout(() => { setTimeout(() => {
initializeProdPool(); initializeProdPool();
}, 5 * 1000); }, 5 * 1000);
return { success: false, message: "The server is not installed." }; return { success: false, message: "The server is not installed." };
} }
// const dbServer = await db // const dbServer = await db
// .select() // .select()
// .from(settings) // .from(settings)
// .where(eq(settings.name, "dbServer")); // .where(eq(settings.name, "dbServer"));
// the move to the go version for settings // the move to the go version for settings
const dbServer = serverSettings.filter( const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer" (n: any) => n.name === "dbServer",
) as any; ) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`); const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`);
if (!serverUp) { if (!serverUp) {
createLog( createLog(
"error", "error",
"lst", "lst",
"server", "server",
`The sql ${dbServer[0].value} is not reachable` `The sql ${dbServer[0].value} is not reachable`,
); );
closePool() // closePool()
setTimeout(() => { // setTimeout(() => {
initializeProdPool(); // initializeProdPool();
}, 2*1000); // }, 2*1000);
return { return {
success: false, success: false,
message: `The sql ${dbServer[0].value} is not reachable`, message: `The sql ${dbServer[0].value} is not reachable`,
data: [], data: [],
}; };
} }
// make sure the server is not set to localhost this will prevent some weird issues later but can be localhost on the dev // make sure the server is not set to localhost this will prevent some weird issues later but can be localhost on the dev
// const serverLoc = await db // const serverLoc = await db
// .select() // .select()
// .from(settings) // .from(settings)
// .where(eq(settings.name, "dbServer")); // .where(eq(settings.name, "dbServer"));
const serverLoc = serverSettings.filter( const serverLoc = serverSettings.filter(
(n: any) => n.name === "dbServer" (n: any) => n.name === "dbServer",
) as any; ) as any;
if ( if (
serverLoc[0].value === "localhost" && serverLoc[0].value === "localhost" &&
process.env.NODE_ENV !== "development" process.env.NODE_ENV !== "development"
) { ) {
createLog( createLog(
"error", "error",
"lst", "lst",
"sqlProd", "sqlProd",
"The server is set to localhost, and you are not in development mode." "The server is set to localhost, and you are not in development mode.",
); );
return { return {
success: false, success: false,
message: message:
"The server is set to localhost, and you are not in development mode.", "The server is set to localhost, and you are not in development mode.",
data: [], data: [],
}; };
} }
// if you were restarting from the endpoint you get this lovely error // if you were restarting from the endpoint you get this lovely error
if (connected) { if (connected) {
createLog("error", "lst", "sqlProd", "There is already a connection."); createLog("error", "lst", "sqlProd", "There is already a connection.");
return { success: false, message: "There is already a connection." }; return { success: false, message: "There is already a connection." };
} }
try { try {
const config = await prodSqlConfig(); const config = await prodSqlConfig();
pool = await sql.connect(config!); pool = await sql.connect(config!);
createLog( createLog(
"info", "info",
"lst", "lst",
"sqlProd", "sqlProd",
`Connected to ${config?.server}, and looking at ${config?.database}` `Connected to ${config?.server}, and looking at ${config?.database}`,
); );
connected = true; connected = true;
return { return {
success: true, success: true,
message: "The sql server connection has been closed", message: "The sql server connection has been closed",
}; };
} catch (error) { } catch (error) {
createLog( createLog(
"error", "error",
"lst", "lst",
"sqlProd", "sqlProd",
`${JSON.stringify( `${JSON.stringify(error)}, "There was an error connecting to the pool."`,
error );
)}, "There was an error connecting to the pool."` // closePool()
); // setTimeout(() => {
closePool() // initializeProdPool();
setTimeout(() => { // }, 2*1000);
initializeProdPool();
}, 2*1000);
return { return {
success: true, success: true,
message: "The sql server connection has been closed", message: "The sql server connection has been closed",
} };
//throw new Error("There was an error closing the sql connection"); //throw new Error("There was an error closing the sql connection");
} }
}; };
export const closePool = async () => { export const closePool = async () => {
if (!connected) { if (!connected) {
createLog( createLog(
"error", "error",
"lst", "lst",
"sqlProd", "sqlProd",
"There is no connection a connection." "There is no connection a connection.",
); );
return { success: false, message: "There is already a connection." }; return { success: false, message: "There is already a connection." };
} }
try { try {
await pool.close(); await pool.close();
createLog("info", "lst", "sqlProd", "Connection pool closed"); createLog("info", "lst", "sqlProd", "Connection pool closed");
connected = false; connected = false;
return { return {
success: true, success: true,
message: "The sql server connection has been closed", message: "The sql server connection has been closed",
}; };
} catch (error) { } catch (error) {
createLog( createLog(
"error", "error",
"lst", "lst",
"sqlProd", "sqlProd",
`${JSON.stringify( `${JSON.stringify(
error error,
)}, "There was an error closing the sql connection"` )}, "There was an error closing the sql connection"`,
); );
throw new Error("There was an error closing the sql connection"); throw new Error("There was an error closing the sql connection");
} }
}; };
export async function query(queryToRun: string, name: string) { export async function query(queryToRun: string, name: string) {
/** /**
* Just an extra catch incase someone tried to run a query while we were not connected to the server or sql server * Just an extra catch incase someone tried to run a query while we were not connected to the server or sql server
*/ */
// const dbServer = await db // const dbServer = await db
// .select() // .select()
// .from(settings) // .from(settings)
// .where(eq(settings.name, "dbServer")); // .where(eq(settings.name, "dbServer"));
const dbServer = serverSettings.filter( const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer" (n: any) => n.name === "dbServer",
) as any; ) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`); const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`);
if (!serverUp) { if (!serverUp) {
createLog( createLog(
"error", "error",
"lst", "lst",
"server", "server",
`The sql ${dbServer[0].value} is not reachable` `The sql ${dbServer[0].value} is not reachable`,
); );
return { return {
success: false, success: false,
message: `The sql ${dbServer[0].value} is not reachable`, message: `The sql ${dbServer[0].value} is not reachable`,
data: [], data: [],
}; };
} }
if (!connected) { if (!connected) {
createLog( createLog(
"error", "error",
"lst", "lst",
"server", "server",
`The sql ${dbServer[0].value} is not connected` `The sql ${dbServer[0].value} is not connected`,
); );
return { return {
success: false, success: false,
message: `The sql ${dbServer[0].value} is not not connected`, message: `The sql ${dbServer[0].value} is not not connected`,
data: [], data: [],
}; };
} }
/** /**
* We no longer need to send over the plant token change as we do it inside the query function. * We no longer need to send over the plant token change as we do it inside the query function.
*/ */
// const plantToken = await db // const plantToken = await db
// .select() // .select()
// .from(settings) // .from(settings)
// .where(eq(settings.name, "plantToken")); // .where(eq(settings.name, "plantToken"));
const plantToken = serverSettings.filter( const plantToken = serverSettings.filter(
(n: any) => n.name === "plantToken" (n: any) => n.name === "plantToken",
) as any; ) as any;
const query = queryToRun.replaceAll("test1", plantToken[0].value); const query = queryToRun.replaceAll("test1", plantToken[0].value);
try { try {
const result = await pool.request().query(query); const result = await pool.request().query(query);
return { return {
success: true, success: true,
message: `Query results for: ${name}`, message: `Query results for: ${name}`,
data: result.recordset, data: result.recordset,
}; };
} catch (error: any) { } catch (error: any) {
if (error.code === "ETIMEOUT") { if (error.code === "ETIMEOUT") {
createLog( createLog(
"error", "error",
"lst", "lst",
"sqlProd", "sqlProd",
`${JSON.stringify( `${JSON.stringify(error)}, ${name} did not run due to a timeout.`,
error );
)}, ${name} did not run due to a timeout.` //throw new Error(`${name} query did not run due to a timeout.`);
); return {
//throw new Error(`${name} query did not run due to a timeout.`); success: false,
return { message: `${name} query did not run due to a timeout.`,
success: false, data: [],
message: `${name} query did not run due to a timeout.`, };
data: [], }
};
}
if (error.code === "EREQUEST") { if (error.code === "EREQUEST") {
// throw new Error( // throw new Error(
// `${name} encoutnered an error ${error.originalError.info.message}` // `${name} encoutnered an error ${error.originalError.info.message}`
// ); // );
return { return {
success: false, success: false,
message: `${name} encoutnered an error ${error.originalError.info.message}`, message: `${name} encoutnered an error ${error.originalError.info.message}`,
data: [], data: [],
}; };
} }
//console.log(error.originalError.info.message); //console.log(error.originalError.info.message);
//EREQUEST //EREQUEST
//throw new Error(`${name} encoutnered an error ${error.code}`); //throw new Error(`${name} encoutnered an error ${error.code}`);
} }
} }