feat(eom): add historical inventory data, split into daily snapshots

This commit is contained in:
2025-05-28 17:01:22 -05:00
parent 25cfee58d0
commit 96deca15f0
16 changed files with 6385 additions and 36 deletions

View File

@@ -0,0 +1,16 @@
-- Migration 0062: daily historical inventory snapshot table, written once a
-- day by the "histInv" cron job (controller/eom/historicalInv.ts).
-- NOTE(review): the *_QTY columns are created as integer here but are
-- immediately retyped to text by migrations 0063 and 0064 — presumably the
-- upstream SQL Server query returns strings; confirm before consolidating.
-- NOTE(review): "avaliable_QTY" misspells "available"; the name is now baked
-- into the schema and application code, so renaming needs its own migration.
CREATE TABLE "invHistoricalData" (
"inv_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"histDate" date NOT NULL,
"plantToken" text,
"article" text NOT NULL,
"articleDescription" text NOT NULL,
"materialType" text,
"total_QTY" integer,
"avaliable_QTY" integer,
"coa_QTY" integer,
"held_QTY" integer,
"consignment" integer,
"location" integer,
"upd_user" text,
"upd_date" timestamp
);

View File

@@ -0,0 +1,5 @@
-- Migration 0063: retype most quantity columns from integer to text
-- (presumably because the snapshot job inserts string values from the
-- SQL Server query — TODO confirm). "avaliable_QTY" follows in 0064.
-- "--> statement-breakpoint" is drizzle-kit's statement separator; keep it.
ALTER TABLE "invHistoricalData" ALTER COLUMN "total_QTY" SET DATA TYPE text;--> statement-breakpoint
ALTER TABLE "invHistoricalData" ALTER COLUMN "coa_QTY" SET DATA TYPE text;--> statement-breakpoint
ALTER TABLE "invHistoricalData" ALTER COLUMN "held_QTY" SET DATA TYPE text;--> statement-breakpoint
ALTER TABLE "invHistoricalData" ALTER COLUMN "consignment" SET DATA TYPE text;--> statement-breakpoint
ALTER TABLE "invHistoricalData" ALTER COLUMN "location" SET DATA TYPE text;

View File

@@ -0,0 +1 @@
-- Migration 0064: follow-up to 0063 — retype the remaining quantity column
-- ("avaliable_QTY", sic) from integer to text.
ALTER TABLE "invHistoricalData" ALTER COLUMN "avaliable_QTY" SET DATA TYPE text;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -435,6 +435,27 @@
"when": 1748370693078, "when": 1748370693078,
"tag": "0061_mature_the_stranger", "tag": "0061_mature_the_stranger",
"breakpoints": true "breakpoints": true
},
{
"idx": 62,
"version": "7",
"when": 1748462132080,
"tag": "0062_narrow_silver_centurion",
"breakpoints": true
},
{
"idx": 63,
"version": "7",
"when": 1748463780733,
"tag": "0063_powerful_revanche",
"breakpoints": true
},
{
"idx": 64,
"version": "7",
"when": 1748464203006,
"tag": "0064_aberrant_blindfold",
"breakpoints": true
} }
] ]
} }

View File

@@ -1,12 +1,43 @@
import {date, pgTable, text} from "drizzle-orm/pg-core"; import {
import {createSelectSchema} from "drizzle-zod"; date,
integer,
pgTable,
text,
timestamp,
uuid,
} from "drizzle-orm/pg-core";
import { createSelectSchema } from "drizzle-zod";
export const eom = pgTable( export const eom = pgTable(
"eom", "eom",
{ {
eomMonth: date().notNull(), eom_id: uuid("eom_id").defaultRandom().primaryKey(),
article: text().notNull(), eomMonth: date("eomMonth").notNull(), // what month are we running in should just be the first of current month
articleDescription: text().notNull(), plantToken: text("plantToken"),
article: text("article").notNull(),
articleDescription: text("articleDescription").notNull(),
materialType: text("materialType"),
invStart: integer("invStart"), // this will come from the previous month
invEnd: integer("invEnd"),
intransit: integer("intransit"),
// pass over a calculation for ending inv
purchase: integer("purchase"),
gpRecived: integer("gpRecived"),
// pass calcuation for difference
materialIn: integer("materialIn"), // from other alpla plants
materialOut: integer("materialOut"), // out to other alpla plants
quarantine: integer("quarantine"),
// calcualtion for actaul consumption
prodConsumption: integer("prodConsumption"),
// difference will be a calculated number
// waste will be calculated.
priceKg: text("priceKg"), // will be converted to a float and then calcuated into the data
// loss/gain calcualtion
comments: text("comments"),
weight: text("weight"), // for calculations should be converted to a float
pfc: text("pfc"), // profit center this will belong too.
upd_user: text("upd_user"),
upd_date: timestamp("upd_date"),
} }
// (table) => [ // (table) => [
// // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`), // // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),

View File

@@ -0,0 +1,40 @@
import {
  date,
  integer,
  pgTable,
  text,
  timestamp,
  uuid,
} from "drizzle-orm/pg-core";
import { createSelectSchema } from "drizzle-zod";

// Daily snapshot of plant inventory pulled from the SQL Server data mart by
// the "histInv" cron job (controller/eom/historicalInv.ts); one row per
// inventory line per day.
export const invHistoricalData = pgTable(
  "invHistoricalData",
  {
    inv_id: uuid("inv_id").defaultRandom().primaryKey(),
    histDate: date("histDate").notNull(), // calendar date the snapshot was taken
    plantToken: text("plantToken"), // plant identifier, read from the settings table
    article: text("article").notNull(),
    articleDescription: text("articleDescription").notNull(),
    materialType: text("materialType"),
    // Quantity columns are text, not integer — migrations 0063/0064 retyped
    // them, presumably because the upstream query returns strings; confirm.
    // NOTE(review): "avaliable_QTY" misspells "available" but the name is
    // baked into the DB schema; renaming would require a migration.
    total_QTY: text("total_QTY"),
    avaliable_QTY: text("avaliable_QTY"),
    coa_QTY: text("coa_QTY"),
    held_QTY: text("held_QTY"),
    consignment: text("consignment"),
    location: text("location"), // not populated by the snapshot job yet
    upd_user: text("upd_user"), // audit: who wrote the row (job writes "LST")
    upd_date: timestamp("upd_date"), // audit: when the row was written
  }
  // (table) => [
  //   // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
  //   uniqueIndex("role_name").on(table.name),
  // ]
);

// Zod schema for validating rows selected from this table in API responses.
// NOTE(review): the export name "selectRolesSchema" is copy-pasted from the
// roles schema and is misleading; it is exported, so renaming touches callers.
export const selectRolesSchema = createSelectSchema(invHistoricalData);

View File

@@ -0,0 +1,91 @@
import { db } from "../../../../../database/dbclient.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { totalInvNoRn } from "../../../sqlServer/querys/dataMart/totalINV.js";
import { invHistoricalData } from "../../../../../database/schema/historicalINV.js";
import { format } from "date-fns-tz";
import { settings } from "../../../../../database/schema/settings.js";
import { sql } from "drizzle-orm";
import { createLogisticsJob } from "../../utils/logisticsIntervals.js";
/**
 * Schedules the "histInv" cron job (06:00 plant-local time, daily) that
 * snapshots the current SQL Server inventory into the `invHistoricalData`
 * Postgres table. First shift change is the closest point to the true
 * historical values for blocked and consignment stock.
 *
 * Reads the "timezone" and "plantToken" rows from the settings table once at
 * startup; logs and aborts if settings cannot be read at all.
 */
export const runHistoricalData = async () => {
  const { data: set, error: setError } = await tryCatch(
    db.select().from(settings)
  );
  if (setError) {
    createLog(
      "error",
      "lst",
      "eom",
      "There was an error reading settings for the historical inv job."
    );
    return;
  }

  // Guard against a missing "timezone" row — previously this crashed with a
  // TypeError on timeZone[0].value. Fall back to the scheduler default.
  const timeZone =
    set.filter((n: any) => n.name === "timezone")[0]?.value ??
    "America/Chicago";
  // plantToken is the same for every inserted row; resolve it once here
  // instead of filtering the settings array on every loop iteration.
  const plantToken =
    set.filter((n: any) => n.name === "plantToken")[0]?.value ?? null;

  createLogisticsJob("histInv", `0 6 * * *`, timeZone, async () => {
    // Comment out the lot-number column so rows aggregate per article
    // rather than per lot.
    const updatedQuery = totalInvNoRn.replaceAll(
      ",IdProdPlanung",
      "--,IdProdPlanung"
    );
    const { data: inv, error: invError } = await tryCatch(
      query(updatedQuery, "EOM historical inv")
    );
    if (invError) {
      createLog(
        "error",
        "lst",
        "eom",
        "There was an error getting eom historical inv data."
      );
      return;
    }

    // Insert one snapshot row per inventory line returned by the query.
    for (const current of inv?.data ?? []) {
      const { error } = await tryCatch(
        db.insert(invHistoricalData).values({
          // Postgres `date` columns expect ISO yyyy-MM-dd; the previous
          // MM-dd-yyyy format is ambiguous and depends on the server's
          // DateStyle setting.
          histDate: format(new Date(), "yyyy-MM-dd"),
          plantToken,
          article: current.av,
          articleDescription: current.Alias,
          total_QTY: current.Total_PalletQTY,
          avaliable_QTY: current.Avaliable_PalletQTY,
          coa_QTY: current.COA_QTY,
          held_QTY: current.Held_QTY,
          consignment: current.Consigment,
          // location is intentionally omitted — not returned by this query.
          upd_user: "LST",
          upd_date: sql`NOW()`,
        })
      );
      if (error) {
        createLog(
          "error",
          "lst",
          "eom",
          `Error adding historical data, ${error}`
        );
      }
    }
  });
};

View File

@@ -16,6 +16,7 @@ import standardTemplate from "./route/dm/getStandardTemplate.js";
import standardForcasttemplate from "./route/dm/getStandardForecastTemplate.js"; import standardForcasttemplate from "./route/dm/getStandardForecastTemplate.js";
import postForecast from "./route/dm/forecastIn.js"; import postForecast from "./route/dm/forecastIn.js";
import outbound from "./route/getOutbound.js"; import outbound from "./route/getOutbound.js";
import { runHistoricalData } from "./controller/eom/historicalInv.js";
const app = new OpenAPIHono(); const app = new OpenAPIHono();
@@ -49,7 +50,8 @@ const appRoutes = routes.forEach((route) => {
setTimeout(() => { setTimeout(() => {
migrateAdjustments(); migrateAdjustments();
}, 120 * 1000); runHistoricalData();
}, 120 * 1000); // starts 2 min after a server restart or crash.
/** /**
* Start the cycle count check * Start the cycle count check

View File

@@ -0,0 +1,61 @@
import { Cron } from "croner";
import type { JobInfo } from "../../../types/JobInfo.js";
import { createLog } from "../../logger/logger.js";
export let runningLogisticsCrons: Record<string, Cron> = {};
export const createLogisticsJob = (
id: string, // this is just the name of the job running
schedule: string, // `*/30 * * * *`; // default to be every 30 min
timezone: string,
task: () => Promise<void>
) => {
// Destroy existing job if it exists
if (runningLogisticsCrons[id]) {
runningLogisticsCrons[id].stop(); // Croner uses .stop() instead of .destroy()
}
// Create new job with Croner
runningLogisticsCrons[id] = new Cron(
schedule,
{
timezone: timezone,
catch: true, // Prevents unhandled rejections
},
task
);
createLog(
"info",
"lst",
"logistics",
`Cron setup for ${id}, trigger time: ${schedule}`
);
// Optional: Add error handling (Croner emits 'error' events)
// runningNotifications[id].on("error", (err) => {
// console.error(`Job ${id} failed:`, err);
// });
};
/**
 * Snapshot of every registered logistics job, for status/monitoring routes.
 * NOTE(review): `lastRun` is produced here but is absent from the JobInfo
 * interface as declared, so it is erased from the static type — verify.
 */
export const getAllLogisticsJobs = (): JobInfo[] =>
  Object.entries(runningLogisticsCrons).map(([id, job]) => ({
    id,
    // Croner reports its pattern/next/previous run; normalize empties to
    // the same fallbacks the rest of the codebase uses.
    schedule: job.getPattern() || "invalid",
    nextRun: job.nextRun() || null,
    lastRun: job.previousRun() || null,
    isRunning: job ? !job.isStopped() : false,
  }));
// const removeNotification = (id: any) => {
// if (runningLogisticsCrons[id]) {
// runningLogisticsCrons[id].stop();
// delete runningLogisticsCrons[id];
// }
// };
export const stopAllLogisticsJobs = () => {
Object.values(runningLogisticsCrons).forEach((job: any) => job.stop());
runningLogisticsCrons = {}; // Clear the object
};

View File

@@ -1,11 +1,12 @@
import { db } from "../../../../database/dbclient.js"; import { db } from "../../../../database/dbclient.js";
import { notifications } from "../../../../database/schema/notifications.js"; import { notifications } from "../../../../database/schema/notifications.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../globalUtils/tryCatch.js";
import type { JobInfo } from "../../../types/JobInfo.js";
import { createLog } from "../../logger/logger.js"; import { createLog } from "../../logger/logger.js";
import { Cron } from "croner"; import { Cron } from "croner";
// Store active timeouts by notification ID // Store active timeouts by notification ID
export let runningNotifications: Record<string, Cron> = {}; export let runningCrons: Record<string, Cron> = {};
export const startNotificationMonitor = async () => { export const startNotificationMonitor = async () => {
// if restarted or crashed we need to make sure the running notifications is cleared // if restarted or crashed we need to make sure the running notifications is cleared
@@ -29,7 +30,7 @@ export const startNotificationMonitor = async () => {
for (const note of notes) { for (const note of notes) {
//if we get deactivated remove it. //if we get deactivated remove it.
if (runningNotifications[note.name] && !note.active) { if (runningCrons[note.name] && !note.active) {
createLog( createLog(
"info", "info",
"notify", "notify",
@@ -44,12 +45,12 @@ export const startNotificationMonitor = async () => {
if ( if (
!note.active || !note.active ||
// note.emails === "" || // note.emails === "" ||
runningNotifications[note.name] runningCrons[note.name]
) { ) {
continue; continue;
} }
if (!runningNotifications[note.name] && note.active) { if (!runningCrons[note.name] && note.active) {
createLog( createLog(
"info", "info",
"notify", "notify",
@@ -105,12 +106,12 @@ export const startNotificationMonitor = async () => {
const createJob = (id: string, schedule: string, task: () => Promise<void>) => { const createJob = (id: string, schedule: string, task: () => Promise<void>) => {
// Destroy existing job if it exists // Destroy existing job if it exists
if (runningNotifications[id]) { if (runningCrons[id]) {
runningNotifications[id].stop(); // Croner uses .stop() instead of .destroy() runningCrons[id].stop(); // Croner uses .stop() instead of .destroy()
} }
// Create new job with Croner // Create new job with Croner
runningNotifications[id] = new Cron( runningCrons[id] = new Cron(
schedule, schedule,
{ {
timezone: "America/Chicago", timezone: "America/Chicago",
@@ -125,15 +126,8 @@ const createJob = (id: string, schedule: string, task: () => Promise<void>) => {
// }); // });
}; };
interface JobInfo {
id: string;
schedule: string;
nextRun: Date | null;
isRunning: boolean;
}
export const getAllJobs = (): JobInfo[] => { export const getAllJobs = (): JobInfo[] => {
return Object.entries(runningNotifications).map(([id, job]) => ({ return Object.entries(runningCrons).map(([id, job]) => ({
id, id,
schedule: job.getPattern() || "invalid", schedule: job.getPattern() || "invalid",
nextRun: job.nextRun() || null, nextRun: job.nextRun() || null,
@@ -143,15 +137,15 @@ export const getAllJobs = (): JobInfo[] => {
}; };
const removeNotification = (id: any) => { const removeNotification = (id: any) => {
if (runningNotifications[id]) { if (runningCrons[id]) {
runningNotifications[id].stop(); runningCrons[id].stop();
delete runningNotifications[id]; delete runningCrons[id];
} }
}; };
export const stopAllJobs = () => { export const stopAllJobs = () => {
Object.values(runningNotifications).forEach((job: any) => job.stop()); Object.values(runningCrons).forEach((job: any) => job.stop());
runningNotifications = {}; // Clear the object runningCrons = {}; // Clear the object
}; };
/* /*

View File

@@ -21,6 +21,13 @@ const newSettings = [
description: "What are we listening on", description: "What are we listening on",
moduleName: "server", moduleName: "server",
}, },
{
name: "timezone",
value: "America/Chicago",
description:
"What time zone is the server in this is used for cronjobs and some other time stuff",
moduleName: "server",
},
{ {
name: "dbUser", name: "dbUser",
value: "alplaprod", value: "alplaprod",

View File

@@ -11,9 +11,10 @@ sum(VerfuegbareMengeSum) as Avaliable_PalletQTY,
sum(case when c.Description LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as COA_Pallets, sum(case when c.Description LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as COA_Pallets,
sum(case when c.Description LIKE '%COA%' then GesperrteMengeSum else 0 end) as COA_QTY, sum(case when c.Description LIKE '%COA%' then GesperrteMengeSum else 0 end) as COA_QTY,
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeVPKSum else 0 end) as Held_Pallets, sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeVPKSum else 0 end) as Held_Pallets,
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeSum else 0 end) as Held_QTY, sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeSum else 0 end) as Held_QTY
IdProdPlanung as Lot, ,sum(case when x.WarenLagerLagerTyp = 8 then VerfuegbareMengeSum else 0 end) as Consigment
IdAdressen, ,IdProdPlanung as Lot
--,IdAdressen,
x.AdressBez x.AdressBez
--,* --,*
from [AlplaPROD_test1].dbo.[V_LagerPositionenBarcodes] (nolock) x from [AlplaPROD_test1].dbo.[V_LagerPositionenBarcodes] (nolock) x
@@ -33,8 +34,11 @@ The data below will be controlled by the user in excell by default everything wi
*/ */
where /*IdArtikelTyp = 1 and */x.IdWarenlager not in (6, 1) where /*IdArtikelTyp = 1 and */x.IdWarenlager not in (6, 1)
group by x.idartikelVarianten, ArtikelVariantenAlias, IdProdPlanung, c.Description, IdAdressen, group by x.idartikelVarianten, ArtikelVariantenAlias, c.Description
x.AdressBez --, x.Lfdnr --,IdAdressen
,x.AdressBez
,IdProdPlanung
--, x.Lfdnr
order by x.IdArtikelVarianten order by x.IdArtikelVarianten
`; `;
@@ -50,9 +54,9 @@ sum(VerfuegbareMengeSum) as Avaliable_PalletQTY,
sum(case when c.Description LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as COA_Pallets, sum(case when c.Description LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as COA_Pallets,
sum(case when c.Description LIKE '%COA%' then GesperrteMengeSum else 0 end) as COA_QTY, sum(case when c.Description LIKE '%COA%' then GesperrteMengeSum else 0 end) as COA_QTY,
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeVPKSum else 0 end) as Held_Pallets, sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeVPKSum else 0 end) as Held_Pallets,
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeSum else 0 end) as Held_QTY, sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeSum else 0 end) as Held_QTY
IdProdPlanung as Lot, ,IdProdPlanung as Lot
IdAdressen, ,IdAdressen,
x.AdressBez x.AdressBez
--,* --,*
from [AlplaPROD_test1].dbo.[V_LagerPositionenBarcodes] (nolock) x from [AlplaPROD_test1].dbo.[V_LagerPositionenBarcodes] (nolock) x
@@ -72,8 +76,9 @@ The data below will be controlled by the user in excell by default everything wi
*/ */
where IdArtikelTyp = 1 and x.IdWarenlager not in (6, 1) where IdArtikelTyp = 1 and x.IdWarenlager not in (6, 1)
group by x.idartikelVarianten, ArtikelVariantenAlias, IdProdPlanung, c.Description, IdAdressen, group by x.idartikelVarianten, ArtikelVariantenAlias, c.Description, IdAdressen,
x.AdressBez , x.Lfdnr x.AdressBez , x.Lfdnr,
IdProdPlanung -- this will be flagged as being removed when we do historical.
order by x.IdArtikelVarianten order by x.IdArtikelVarianten
`; `;

6
server/types/JobInfo.ts Normal file
View File

@@ -0,0 +1,6 @@
/**
 * Status snapshot of a scheduled cron job as reported by the job-listing
 * helpers (getAllJobs / getAllLogisticsJobs).
 */
export interface JobInfo {
  id: string; // registry key / job name
  schedule: string; // cron pattern, or "invalid" when unreadable
  nextRun: Date | null;
  // Most recent completed run. Optional (and nullable) because only some
  // producers report it — getAllLogisticsJobs does, getAllJobs may not.
  lastRun?: Date | null;
  isRunning: boolean;
}