migration #51
10
database/migrations/0070_brief_mephisto.sql
Normal file
10
database/migrations/0070_brief_mephisto.sql
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
-- Migration 0070: create "labelRatio" (counters for auto vs. manual label
-- prints, reset at "lastReset") and add "lot_number" to "invHistoricalData".
-- FIX(review): the column was declared as " ratio_id" — a leading space
-- INSIDE the quoted identifier, which would create a column literally named
-- ' ratio_id'. Renamed to "ratio_id".
-- NOTE(review): confirm meta/0070_snapshot.json and the drizzle schema use
-- the same (space-free) name before this migration is applied.
CREATE TABLE "labelRatio" (
	"ratio_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
	"name" text DEFAULT 'labels',
	"autoLabel" integer DEFAULT 0,
	"manualLabel" integer DEFAULT 0,
	"lastReset" timestamp DEFAULT now()
);
--> statement-breakpoint
ALTER TABLE "invHistoricalData" ADD COLUMN "lot_number" text;--> statement-breakpoint
CREATE UNIQUE INDEX "labelname" ON "labelRatio" USING btree ("name");
|
||||||
2
database/migrations/0071_fantastic_old_lace.sql
Normal file
2
database/migrations/0071_fantastic_old_lace.sql
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
-- Migration 0071: add defaults to the "invHistoricalData" audit columns so
-- inserts that omit them still record an updater and a timestamp.
-- NOTE(review): 'lst' is presumably the system/service user name — confirm.
ALTER TABLE "invHistoricalData" ALTER COLUMN "upd_user" SET DEFAULT 'lst';--> statement-breakpoint
ALTER TABLE "invHistoricalData" ALTER COLUMN "upd_date" SET DEFAULT now();
|
||||||
2250
database/migrations/meta/0070_snapshot.json
Normal file
2250
database/migrations/meta/0070_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
2252
database/migrations/meta/0071_snapshot.json
Normal file
2252
database/migrations/meta/0071_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -491,6 +491,20 @@
|
|||||||
"when": 1752195894698,
|
"when": 1752195894698,
|
||||||
"tag": "0069_chemical_maximus",
|
"tag": "0069_chemical_maximus",
|
||||||
"breakpoints": true
|
"breakpoints": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"idx": 70,
|
||||||
|
"version": "7",
|
||||||
|
"when": 1754767718941,
|
||||||
|
"tag": "0070_brief_mephisto",
|
||||||
|
"breakpoints": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"idx": 71,
|
||||||
|
"version": "7",
|
||||||
|
"when": 1754768521841,
|
||||||
|
"tag": "0071_fantastic_old_lace",
|
||||||
|
"breakpoints": true
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
@@ -12,7 +12,7 @@ export const invHistoricalData = pgTable(
|
|||||||
"invHistoricalData",
|
"invHistoricalData",
|
||||||
{
|
{
|
||||||
inv_id: uuid("inv_id").defaultRandom().primaryKey(),
|
inv_id: uuid("inv_id").defaultRandom().primaryKey(),
|
||||||
histDate: date("histDate").notNull(), // what month are we running in should just be the first of current month
|
histDate: date("histDate").notNull(), // this date should always be yesterday when we post it.
|
||||||
plantToken: text("plantToken"),
|
plantToken: text("plantToken"),
|
||||||
article: text("article").notNull(),
|
article: text("article").notNull(),
|
||||||
articleDescription: text("articleDescription").notNull(),
|
articleDescription: text("articleDescription").notNull(),
|
||||||
@@ -21,10 +21,11 @@ export const invHistoricalData = pgTable(
|
|||||||
avaliable_QTY: text("avaliable_QTY"),
|
avaliable_QTY: text("avaliable_QTY"),
|
||||||
coa_QTY: text("coa_QTY"),
|
coa_QTY: text("coa_QTY"),
|
||||||
held_QTY: text("held_QTY"),
|
held_QTY: text("held_QTY"),
|
||||||
|
lot_Number: text("lot_number"),
|
||||||
consignment: text("consignment"),
|
consignment: text("consignment"),
|
||||||
location: text("location"),
|
location: text("location"),
|
||||||
upd_user: text("upd_user"),
|
upd_user: text("upd_user").default("lst"),
|
||||||
upd_date: timestamp("upd_date"),
|
upd_date: timestamp("upd_date").defaultNow(),
|
||||||
}
|
}
|
||||||
// (table) => [
|
// (table) => [
|
||||||
// // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
|
// // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
|
||||||
|
|||||||
@@ -1,15 +0,0 @@
|
|||||||
// import {prisma} from "database";
|
|
||||||
// import {createLog} from "logging";
|
|
||||||
|
|
||||||
// export const deleteHistory = async (date: string) => {
|
|
||||||
// // delete the inventory if it equals this date
|
|
||||||
// try {
|
|
||||||
// const remove = await prisma.$executeRaw`
|
|
||||||
// DELETE FROM historyInventory
|
|
||||||
// WHERE histDate < ${date}
|
|
||||||
// `;
|
|
||||||
// createLog("general/eom", "info", `${remove} were just remove from the historical inventory for date: ${date}`);
|
|
||||||
// } catch (error) {
|
|
||||||
// createLog("general/eom", "error", `Removing historical inventory error: ${error}`);
|
|
||||||
// }
|
|
||||||
// };
|
|
||||||
@@ -4,10 +4,19 @@ const app = new OpenAPIHono();
|
|||||||
|
|
||||||
import stats from "./route/stats.js";
import history from "./route/invHistory.js";
import { createJob } from "../notifications/utils/processNotifications.js";
// NOTE(review): the imported name carries the "IMmport" typo — it must stay
// in sync with the export in ./utils/historicalInv.js.
import { historicalInvIMmport } from "./utils/historicalInv.js";

// Sub-routers mounted under the /eom prefix.
const routes = [stats, history] as const;

// NOTE(review): Array.prototype.forEach returns undefined, so `appRoutes`
// is always undefined. The binding looks unused — confirm no other code in
// this file references it, then drop the assignment.
const appRoutes = routes.forEach((route) => {
  app.route("/eom", route);
});

// setTimeout(() => {
// historicalInvIMmport();
// }, 5 * 1000);
// the time we want to run the historical data should be the same time the historical data run on the server
// getting this from the shift time
// Cron "0 7 * * *" = every day at 07:00 (server-local time — TODO confirm TZ).
createJob("eom_historical_inv", "0 7 * * *", historicalInvIMmport);

export default app;
|
||||||
|
|||||||
101
server/services/eom/utils/historicalInv.ts
Normal file
101
server/services/eom/utils/historicalInv.ts
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
import { sql } from "drizzle-orm";
|
||||||
|
import { db } from "../../../../database/dbclient.js";
|
||||||
|
import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
|
||||||
|
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||||
|
import { createLog } from "../../logger/logger.js";
|
||||||
|
import { query } from "../../sqlServer/prodSqlServer.js";
|
||||||
|
import { totalInvNoRn } from "../../sqlServer/querys/dataMart/totalINV.js";
|
||||||
|
import { format } from "date-fns-tz";
|
||||||
|
import { serverSettings } from "../../server/controller/settings/getSettings.js";
|
||||||
|
import { deleteHistory } from "./removeHistorical.js";
|
||||||
|
|
||||||
|
export const historicalInvIMmport = async () => {
|
||||||
|
const plantToken = serverSettings.filter((n) => n.name === "plantToken");
|
||||||
|
const { data, error } = (await tryCatch(
|
||||||
|
db.select().from(invHistoricalData)
|
||||||
|
)) as any;
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
createLog(
|
||||||
|
"error",
|
||||||
|
"eom",
|
||||||
|
"eom",
|
||||||
|
`There was an error getting the historical data`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// check if we have data already for today this way we dont duplicate anything.
|
||||||
|
const today = new Date();
|
||||||
|
today.setDate(today.getDate() - 1);
|
||||||
|
|
||||||
|
const dateCheck = data?.filter(
|
||||||
|
(i: any) => i.histDate === format(today, "yyyy-MM-dd")
|
||||||
|
);
|
||||||
|
|
||||||
|
if (dateCheck.length === 0) {
|
||||||
|
// get the historical data from the sql
|
||||||
|
const { data: inv, error: invError } = (await tryCatch(
|
||||||
|
query(totalInvNoRn, "eom historical data")
|
||||||
|
)) as any;
|
||||||
|
|
||||||
|
if (invError) {
|
||||||
|
createLog(
|
||||||
|
"error",
|
||||||
|
"eom",
|
||||||
|
"eom",
|
||||||
|
`There was an error getting the sql data`
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (inv.data.length === 0) {
|
||||||
|
createLog("error", "eom", "eom", inv.message);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const importInv = inv.data ? inv.data : [];
|
||||||
|
const eomImportData = importInv.map((i: any) => {
|
||||||
|
return {
|
||||||
|
histDate: sql`(NOW() - INTERVAL '1 day')::date`,
|
||||||
|
plantToken: plantToken[0].value,
|
||||||
|
article: i.av,
|
||||||
|
articleDescription: i.Alias,
|
||||||
|
materialType: "",
|
||||||
|
total_QTY: i.Total_PalletQTY,
|
||||||
|
avaliable_QTY: i.Avaliable_PalletQTY,
|
||||||
|
coa_QTY: i.COA_QTY,
|
||||||
|
held_QTY: i.Held_QTY,
|
||||||
|
consignment: i.Consigment,
|
||||||
|
lot_Number: i.lot,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
const { data: dataImport, error: errorImport } = await tryCatch(
|
||||||
|
db.insert(invHistoricalData).values(eomImportData)
|
||||||
|
);
|
||||||
|
|
||||||
|
if (errorImport) {
|
||||||
|
createLog(
|
||||||
|
"error",
|
||||||
|
"eom",
|
||||||
|
"eom",
|
||||||
|
`There was an error importing all the inventory data.`
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dataImport) {
|
||||||
|
createLog(
|
||||||
|
"info",
|
||||||
|
"eom",
|
||||||
|
"eom",
|
||||||
|
`All data was imported succefully.`
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
createLog("info", "eom", "eom", `Yesterdays Data already in..`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// do the check to delete old data
|
||||||
|
deleteHistory();
|
||||||
|
};
|
||||||
51
server/services/eom/utils/removeHistorical.ts
Normal file
51
server/services/eom/utils/removeHistorical.ts
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
// import {prisma} from "database";
|
||||||
|
// import {createLog} from "logging";
|
||||||
|
|
||||||
|
import { lte, sql } from "drizzle-orm";
|
||||||
|
import { db } from "../../../../database/dbclient.js";
|
||||||
|
import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
|
||||||
|
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||||
|
import { createLog } from "../../logger/logger.js";
|
||||||
|
|
||||||
|
// export const deleteHistory = async (date: string) => {
|
||||||
|
// // delete the inventory if it equals this date
|
||||||
|
// try {
|
||||||
|
// const remove = await prisma.$executeRaw`
|
||||||
|
// DELETE FROM historyInventory
|
||||||
|
// WHERE histDate < ${date}
|
||||||
|
// `;
|
||||||
|
// createLog("general/eom", "info", `${remove} were just remove from the historical inventory for date: ${date}`);
|
||||||
|
// } catch (error) {
|
||||||
|
// createLog("general/eom", "error", `Removing historical inventory error: ${error}`);
|
||||||
|
// }
|
||||||
|
// };
|
||||||
|
|
||||||
|
export const deleteHistory = async () => {
|
||||||
|
const { data, error } = await tryCatch(
|
||||||
|
db
|
||||||
|
.delete(invHistoricalData)
|
||||||
|
.where(
|
||||||
|
lte(
|
||||||
|
invHistoricalData.histDate,
|
||||||
|
sql`(NOW() - INTERVAL '45 day')::date`
|
||||||
|
)
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
createLog(
|
||||||
|
"error",
|
||||||
|
"eom",
|
||||||
|
"eom",
|
||||||
|
"There was an error deleting the historical data."
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
createLog(
|
||||||
|
"info",
|
||||||
|
"eom",
|
||||||
|
"eom",
|
||||||
|
"Data older than 45 days has been deleted."
|
||||||
|
);
|
||||||
|
};
|
||||||
Reference in New Issue
Block a user