refactor(psi): rework planning-number calculation to handle a bad downsync that produced negative numbers

This commit is contained in:
2026-01-08 20:08:31 -06:00
parent b15d0d7322
commit 124fde07e0
5 changed files with 463 additions and 266 deletions

View File

@@ -0,0 +1,22 @@
meta {
name: PSI -planning data
type: http
seq: 2
}
get {
url: {{url}}/lst/old/api/datamart/psiplanningdata?avs=118,120&startDate=12/1/2025&endDate=12/31/2026
body: none
auth: inherit
}
params:query {
avs: 118,120
startDate: 12/1/2025
endDate: 12/31/2026
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -1,5 +1,5 @@
vars {
url: https://bow1prod.alpla.net
url: http://localhost:4200
session_cookie:
urlv2: http://usbow1vms006:3000
jwtV2:

View File

@@ -2,62 +2,71 @@ import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { planningNumbersByAVDate } from "../../sqlServer/querys/psiReport/planningNumbersByAv.js";
import { improvedPsiPlanningInfo } from "./psiPlanningDataImproved.js";
// type ArticleData = {
// id: string
// }
export const psiGetPlanningData = async (
avs: string,
startDate: string,
endDate: string
avs: string,
startDate: string,
endDate: string,
) => {
let articles: any = [];
let articles: any = [];
if (!avs) {
return {
success: false,
message: `Missing av's please send at least one over`,
data: [],
};
}
if (!avs) {
return {
success: false,
message: `Missing av's please send at least one over`,
data: [],
};
}
const { data, error } = (await tryCatch(
query(
planningNumbersByAVDate
.replace("[articles]", avs)
.replace("[startDate]", startDate)
.replace("[endDate]", endDate),
"PSI planning info"
)
)) as any;
const { data, error } = (await tryCatch(
query(
planningNumbersByAVDate
.replace("[articles]", avs)
.replace("[startDate]", startDate)
.replace("[endDate]", endDate),
"PSI planning info",
),
)) as any;
if (error) {
createLog(
"error",
"datamart",
"datamart",
`There was an error getting the planning info: ${JSON.stringify(
error
)}`
);
return {
success: false,
messsage: `There was an error getting the planning info`,
data: error,
};
}
// improvedPsiPlanningInfo({
// avs,
// startDate,
// endDate,
// });
if (error) {
createLog(
"error",
"datamart",
"datamart",
`There was an error getting the planning info: ${JSON.stringify(error)}`,
);
return {
success: false,
messsage: `There was an error getting the planning info`,
data: error,
};
}
articles = data.data;
articles = data.data;
return {
success: true,
message: "PSI planning Data",
data: articles.map((n: any) => {
if (n.PalDay) {
return { ...n, PalDay: n.PalDay.toFixed(2) };
}
return {
success: true,
message: "PSI planning Data",
data: await improvedPsiPlanningInfo({
avs,
startDate,
endDate,
}),
// data: articles.map((n: any) => {
// if (n.PalDay) {
// return { ...n, PalDay: n.PalDay.toFixed(2) };
// }
return n;
}),
};
// return n;
// }),
};
};

View File

@@ -0,0 +1,170 @@
import { format } from "date-fns-tz";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
// SQL template for pulling planned production lots from the datamart.
// Placeholders are substituted at call time by improvedPsiPlanningInfo:
//   [startDate]/[endDate] — date window matched against TheoreticEnd
//   [articles]            — comma-separated ArticleHumanReadableId list
// NOTE(review): the "test1" database-name prefix is rewritten to the plant
// token inside query() — do not rename it here without updating that logic.
// WARNING: values are spliced into the SQL text, not bound as parameters —
// callers must only pass trusted/validated input.
const improvedQuery = `
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
SELECT
[RunningNumber] as lot
,[ProfitCentreDescription]
,[MachineDescription]
,[ArticleHumanReadableId]
,[ArticleDescription]
,[DeliveryAddressHumanReadableId]
,[DeliveryAddressDescription]
,[MouldHumanReadableId]
,[BlowheadHumanReadableId1]
,[PackagingInstructionHumanReadableId]
,[PackagingInstructionDescription]
,[MainMaterialHumanReadableId]
,[MainMaterialDescription]
,[CompoundHumanReadableId]
,[CompoundDescription]
,[ProductionLotState]
,[PlanType]
,[ProducedQuantityLoadingUnit]
,[ProducedQuantityPieces]
,[PlanStart]
,[PlanEnd]
,[ProdStart]
,[TheoreticEnd]
,[ProdDuration]
,[SetupDuration]
,[StartupDuration]
,[NetEquipmentEfficiency]
,[UtilisationDuration]
,[CycleTime]
,[Cavities]
,[FixedQuantity]
,[ProducedQuantityTrucks]
,[ProducedQuantityTradeUnit]
,[MaxRegrind]
,[Conflict]
,[ProductionOrderHumanReadableId]
,[ProductionDataImportSource]
,[Remark]
,[BlowheadDescription1]
,[MouldDescription]
,[ProcessLossPercentage]
,[SetupTypeNumberOfPersons]
,[UnplannedDowntimePercentage]
,[PlanQuantityLoadingUnit]
,[PlanQuantityPieces]
,[PlanQuantityTradeUnit]
,[PlanQuantityTrucks]
,[PublishState]
,[LastChange]
,[MaterialConsumed]
,[MaterialStaged]
,[MachineLocation]
,[HasPrioritization]
,[ArticleAlias]
FROM [test1_AlplaPROD2.0_Read].[productionScheduling].[ProductionLot] with (nolock)
where TheoreticEnd between @StartDate and @EndDate
and ArticleHumanReadableId in ([articles])
and PublishState = 1
order by PlanStart
`;
/**
 * Fetches planned production lots for the given articles/date window and
 * splits them into per-production-day buckets.
 *
 * @param something - { avs, startDate, endDate } substituted into the SQL
 *                    template (presumably pre-validated upstream — the query
 *                    is string-built, not parameterized).
 * @returns array of per-day planning rows; empty array when the query fails
 *          or returns nothing.
 */
export const improvedPsiPlanningInfo = async (something: any) => {
  const { data, error } = (await tryCatch(
    query(
      improvedQuery
        .replace("[articles]", something.avs)
        .replace("[startDate]", something.startDate)
        .replace("[endDate]", something.endDate),
      "PSI planning info",
    ),
  )) as any;
  // Previously there was no error handling here ("add error handling in
  // later"): a failed query left `data` null/undefined and `data.data`
  // threw. Surface an empty result set instead so callers still get an
  // array they can render.
  if (error || !data?.data) {
    return [];
  }
  return splitProduction(data.data);
};
/**
 * Splits each production run into per-production-day buckets so planned
 * quantities can be reported day by day. A "production day" runs from
 * 07:00 (local) to 07:00 the next calendar day; a segment before 07:00
 * belongs to the previous production day.
 *
 * Pieces/pallets are pro-rated by each segment's share of the total run
 * duration (declared ProdDuration in hours when present, otherwise the
 * PlanStart→PlanEnd window).
 */
const splitProduction = (runs: any) => {
  const results: any = [];
  const WORKDAY_START_HOUR = 7; // 07:00 start; TODO: read from the shift definition

  // Local-time "M/d/yyyy" — equivalent to date-fns-tz format(d, "M/d/yyyy")
  // with no explicit timeZone, but with no third-party dependency.
  const formatProdDay = (d: Date) =>
    `${d.getMonth() + 1}/${d.getDate()}/${d.getFullYear()}`;

  // Production-day start containing `date`: today 07:00, or yesterday 07:00
  // when `date` falls before 07:00.
  const getProdDayStart = (date: Date) => {
    const d = new Date(date);
    d.setHours(WORKDAY_START_HOUR, 0, 0, 0);
    if (date.getHours() < WORKDAY_START_HOUR) {
      d.setDate(d.getDate() - 1);
    }
    return d;
  };

  runs.forEach((e: any) => {
    const {
      PlanStart,
      PlanEnd,
      PlanQuantityPieces,
      ArticleHumanReadableId,
      ProdDuration,
    } = e;
    const prodStart = new Date(PlanStart);
    const prodEnd = new Date(PlanEnd);
    // Total run time in ms: prefer the declared duration (hours), fall back
    // to the planned window.
    const prodDuration = ProdDuration
      ? ProdDuration * 60 * 60 * 1000
      : prodEnd.getTime() - prodStart.getTime();

    // Guard against bad downsync data: a zero, negative, or NaN duration
    // (e.g. PlanEnd before PlanStart) previously produced Infinity/NaN or
    // negative daily quantities. Skip such runs entirely.
    if (!Number.isFinite(prodDuration) || prodDuration <= 0) {
      return;
    }

    // Walk production-day windows from the one containing the run start.
    let prodDayStart = getProdDayStart(prodStart);
    while (prodDayStart < prodEnd) {
      // Next boundary = this boundary + 1 day (still at 07:00).
      const nextProdDayStart = new Date(prodDayStart);
      nextProdDayStart.setDate(nextProdDayStart.getDate() + 1);

      // Overlap of (prodStart..prodEnd) with this production-day window.
      const segStart = Math.max(prodDayStart.getTime(), prodStart.getTime());
      const segEnd = Math.min(nextProdDayStart.getTime(), prodEnd.getTime());

      if (segEnd > segStart) {
        // Pro-rate planned pieces/pallets by this segment's share of the run.
        const proportion = (segEnd - segStart) / prodDuration;
        const qty = PlanQuantityPieces * proportion;
        const pal = e.PlanQuantityLoadingUnit * proportion;
        results.push({
          Article: ArticleHumanReadableId,
          Description: e.ArticleAlias,
          MachineId: e.MachineLocation,
          MachineName: e.MachineDescription,
          LotNumber: e.lot,
          ProductionDay: formatProdDay(prodDayStart),
          TotalPlanned: e.PlanQuantityPieces,
          QTYPerDay: Math.round(qty), // was parseInt(qty.toFixed(0)); identical for the non-negative values allowed here
          PalDay: parseFloat(pal.toFixed(2)),
          finished: e.ProductionLotState === 3 ? 1 : 0,
        });
      }
      // Move to the next production-day window.
      prodDayStart = nextProdDayStart;
    }
  });
  return results;
};

View File

@@ -1,252 +1,248 @@
import { eq } from "drizzle-orm";
import sql from "mssql";
import { prodSqlConfig } from "./utils/prodServerConfig.js";
import { createLog } from "../logger/logger.js";
import { db } from "../../../database/dbclient.js";
import { settings } from "../../../database/schema/settings.js";
import { eq } from "drizzle-orm";
import { installed } from "../../index.js";
import { checkHostnamePort } from "../../globalUtils/pingServer.js";
import { installed } from "../../index.js";
import { createLog } from "../logger/logger.js";
import { serverSettings } from "../server/controller/settings/getSettings.js";
import { prodSqlConfig } from "./utils/prodServerConfig.js";
let pool: any;
let connected: boolean = false;
export const initializeProdPool = async () => {
if (!installed) {
createLog(
"info",
"lst",
"sqlProd",
"The server was not installed will reconnect in 5 seconds"
);
setTimeout(() => {
initializeProdPool();
}, 5 * 1000);
if (!installed) {
createLog(
"info",
"lst",
"sqlProd",
"The server was not installed will reconnect in 5 seconds",
);
setTimeout(() => {
initializeProdPool();
}, 5 * 1000);
return { success: false, message: "The server is not installed." };
}
// const dbServer = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "dbServer"));
return { success: false, message: "The server is not installed." };
}
// const dbServer = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "dbServer"));
// the move to the go version for settings
const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer"
) as any;
// the move to the go version for settings
const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer",
) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`);
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`);
if (!serverUp) {
createLog(
"error",
"lst",
"server",
`The sql ${dbServer[0].value} is not reachable`
);
closePool()
setTimeout(() => {
initializeProdPool();
}, 2*1000);
return {
success: false,
message: `The sql ${dbServer[0].value} is not reachable`,
data: [],
};
}
if (!serverUp) {
createLog(
"error",
"lst",
"server",
`The sql ${dbServer[0].value} is not reachable`,
);
// closePool()
// setTimeout(() => {
// initializeProdPool();
// }, 2*1000);
return {
success: false,
message: `The sql ${dbServer[0].value} is not reachable`,
data: [],
};
}
// make sure the server is not set to localhost this will prevent some weird issues later but can be localhost on the dev
// const serverLoc = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "dbServer"));
// make sure the server is not set to localhost this will prevent some weird issues later but can be localhost on the dev
// const serverLoc = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "dbServer"));
const serverLoc = serverSettings.filter(
(n: any) => n.name === "dbServer"
) as any;
if (
serverLoc[0].value === "localhost" &&
process.env.NODE_ENV !== "development"
) {
createLog(
"error",
"lst",
"sqlProd",
"The server is set to localhost, and you are not in development mode."
);
return {
success: false,
message:
"The server is set to localhost, and you are not in development mode.",
data: [],
};
}
const serverLoc = serverSettings.filter(
(n: any) => n.name === "dbServer",
) as any;
if (
serverLoc[0].value === "localhost" &&
process.env.NODE_ENV !== "development"
) {
createLog(
"error",
"lst",
"sqlProd",
"The server is set to localhost, and you are not in development mode.",
);
return {
success: false,
message:
"The server is set to localhost, and you are not in development mode.",
data: [],
};
}
// if you were restarting from the endpoint you get this lovely error
if (connected) {
createLog("error", "lst", "sqlProd", "There is already a connection.");
return { success: false, message: "There is already a connection." };
}
try {
const config = await prodSqlConfig();
pool = await sql.connect(config!);
// if you were restarting from the endpoint you get this lovely error
if (connected) {
createLog("error", "lst", "sqlProd", "There is already a connection.");
return { success: false, message: "There is already a connection." };
}
try {
const config = await prodSqlConfig();
pool = await sql.connect(config!);
createLog(
"info",
"lst",
"sqlProd",
`Connected to ${config?.server}, and looking at ${config?.database}`
);
connected = true;
return {
success: true,
message: "The sql server connection has been closed",
};
} catch (error) {
createLog(
"error",
"lst",
"sqlProd",
`${JSON.stringify(
error
)}, "There was an error connecting to the pool."`
);
closePool()
setTimeout(() => {
initializeProdPool();
}, 2*1000);
createLog(
"info",
"lst",
"sqlProd",
`Connected to ${config?.server}, and looking at ${config?.database}`,
);
connected = true;
return {
success: true,
message: "The sql server connection has been closed",
};
} catch (error) {
createLog(
"error",
"lst",
"sqlProd",
`${JSON.stringify(error)}, "There was an error connecting to the pool."`,
);
// closePool()
// setTimeout(() => {
// initializeProdPool();
// }, 2*1000);
return {
success: true,
message: "The sql server connection has been closed",
}
//throw new Error("There was an error closing the sql connection");
}
return {
success: true,
message: "The sql server connection has been closed",
};
//throw new Error("There was an error closing the sql connection");
}
};
export const closePool = async () => {
if (!connected) {
createLog(
"error",
"lst",
"sqlProd",
"There is no connection a connection."
);
return { success: false, message: "There is already a connection." };
}
try {
await pool.close();
createLog("info", "lst", "sqlProd", "Connection pool closed");
connected = false;
return {
success: true,
message: "The sql server connection has been closed",
};
} catch (error) {
createLog(
"error",
"lst",
"sqlProd",
`${JSON.stringify(
error
)}, "There was an error closing the sql connection"`
);
throw new Error("There was an error closing the sql connection");
}
if (!connected) {
createLog(
"error",
"lst",
"sqlProd",
"There is no connection a connection.",
);
return { success: false, message: "There is already a connection." };
}
try {
await pool.close();
createLog("info", "lst", "sqlProd", "Connection pool closed");
connected = false;
return {
success: true,
message: "The sql server connection has been closed",
};
} catch (error) {
createLog(
"error",
"lst",
"sqlProd",
`${JSON.stringify(
error,
)}, "There was an error closing the sql connection"`,
);
throw new Error("There was an error closing the sql connection");
}
};
export async function query(queryToRun: string, name: string) {
/**
* Just an extra catch incase someone tried to run a query while we were not connected to the server or sql server
*/
// const dbServer = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "dbServer"));
/**
* Just an extra catch incase someone tried to run a query while we were not connected to the server or sql server
*/
// const dbServer = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "dbServer"));
const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer"
) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`);
const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer",
) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`);
if (!serverUp) {
createLog(
"error",
"lst",
"server",
`The sql ${dbServer[0].value} is not reachable`
);
return {
success: false,
message: `The sql ${dbServer[0].value} is not reachable`,
data: [],
};
}
if (!serverUp) {
createLog(
"error",
"lst",
"server",
`The sql ${dbServer[0].value} is not reachable`,
);
return {
success: false,
message: `The sql ${dbServer[0].value} is not reachable`,
data: [],
};
}
if (!connected) {
createLog(
"error",
"lst",
"server",
`The sql ${dbServer[0].value} is not connected`
);
if (!connected) {
createLog(
"error",
"lst",
"server",
`The sql ${dbServer[0].value} is not connected`,
);
return {
success: false,
message: `The sql ${dbServer[0].value} is not not connected`,
data: [],
};
}
/**
* We no longer need to send over the plant token change as we do it inside the query function.
*/
// const plantToken = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "plantToken"));
const plantToken = serverSettings.filter(
(n: any) => n.name === "plantToken"
) as any;
const query = queryToRun.replaceAll("test1", plantToken[0].value);
return {
success: false,
message: `The sql ${dbServer[0].value} is not not connected`,
data: [],
};
}
/**
* We no longer need to send over the plant token change as we do it inside the query function.
*/
// const plantToken = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "plantToken"));
const plantToken = serverSettings.filter(
(n: any) => n.name === "plantToken",
) as any;
const query = queryToRun.replaceAll("test1", plantToken[0].value);
try {
const result = await pool.request().query(query);
try {
const result = await pool.request().query(query);
return {
success: true,
message: `Query results for: ${name}`,
data: result.recordset,
};
} catch (error: any) {
if (error.code === "ETIMEOUT") {
createLog(
"error",
"lst",
"sqlProd",
`${JSON.stringify(
error
)}, ${name} did not run due to a timeout.`
);
//throw new Error(`${name} query did not run due to a timeout.`);
return {
success: false,
message: `${name} query did not run due to a timeout.`,
data: [],
};
}
return {
success: true,
message: `Query results for: ${name}`,
data: result.recordset,
};
} catch (error: any) {
if (error.code === "ETIMEOUT") {
createLog(
"error",
"lst",
"sqlProd",
`${JSON.stringify(error)}, ${name} did not run due to a timeout.`,
);
//throw new Error(`${name} query did not run due to a timeout.`);
return {
success: false,
message: `${name} query did not run due to a timeout.`,
data: [],
};
}
if (error.code === "EREQUEST") {
// throw new Error(
// `${name} encoutnered an error ${error.originalError.info.message}`
// );
return {
success: false,
message: `${name} encoutnered an error ${error.originalError.info.message}`,
data: [],
};
}
if (error.code === "EREQUEST") {
// throw new Error(
// `${name} encoutnered an error ${error.originalError.info.message}`
// );
return {
success: false,
message: `${name} encoutnered an error ${error.originalError.info.message}`,
data: [],
};
}
//console.log(error.originalError.info.message);
//EREQUEST
//throw new Error(`${name} encoutnered an error ${error.code}`);
}
//console.log(error.originalError.info.message);
//EREQUEST
//throw new Error(`${name} encoutnered an error ${error.code}`);
}
}