Compare commits
4 Commits
8dfcbc5720
...
995b1dda7c
| Author | SHA1 | Date | |
|---|---|---|---|
| 995b1dda7c | |||
| 97f93a1830 | |||
| 635635b356 | |||
| a691dc276e |
23
backend/configs/gpSql.config.ts
Normal file
23
backend/configs/gpSql.config.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import type sql from "mssql";
|
||||
|
||||
const username = "gpviewer";
|
||||
const password = "gp$$ViewOnly!";
|
||||
|
||||
export const gpSqlConfig: sql.config = {
|
||||
server: `USMCD1VMS011`,
|
||||
database: `ALPLA`,
|
||||
user: username,
|
||||
password: password,
|
||||
options: {
|
||||
encrypt: true,
|
||||
trustServerCertificate: true,
|
||||
},
|
||||
requestTimeout: 90000, // how long until we kill the query and fail it
|
||||
pool: {
|
||||
max: 20, // Maximum number of connections in the pool
|
||||
min: 0, // Minimum number of connections in the pool
|
||||
idleTimeoutMillis: 10000, // How long a connection is allowed to be idle before being released
|
||||
reapIntervalMillis: 1000, // how often to check for idle resources to destroy
|
||||
acquireTimeoutMillis: 100000, // How long until a complete timeout happens
|
||||
},
|
||||
};
|
||||
@@ -22,9 +22,10 @@ export const alplaPurchaseHistory = pgTable("alpla_purchase_history", {
|
||||
upd_user: text("upd_user"),
|
||||
upd_date: timestamp("upd_date").defaultNow(),
|
||||
remark: text("remark"),
|
||||
approvedStatus: text("approved_status").default("pending"),
|
||||
approvedStatus: text("approved_status").default("new"),
|
||||
position: jsonb("position").default([]),
|
||||
createdAt: timestamp("created_at").defaultNow(),
|
||||
updatedAt: timestamp("updated_at").defaultNow(),
|
||||
});
|
||||
|
||||
export const alplaPurchaseHistorySchema =
|
||||
|
||||
17
backend/gpSql/gpSql.routes.ts
Normal file
17
backend/gpSql/gpSql.routes.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { type Express, Router } from "express";
|
||||
import { requireAuth } from "../middleware/auth.middleware.js";
|
||||
import restart from "./gpSqlRestart.route.js";
|
||||
import start from "./gpSqlStart.route.js";
|
||||
import stop from "./gpSqlStop.route.js";
|
||||
export const setupGPSqlRoutes = (baseUrl: string, app: Express) => {
|
||||
//setup all the routes
|
||||
// Apply auth to entire router
|
||||
const router = Router();
|
||||
router.use(requireAuth);
|
||||
|
||||
router.use(start);
|
||||
router.use(stop);
|
||||
router.use(restart);
|
||||
|
||||
app.use(`${baseUrl}/api/system/gpSql`, router);
|
||||
};
|
||||
155
backend/gpSql/gpSqlConnection.controller.ts
Normal file
155
backend/gpSql/gpSqlConnection.controller.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
import sql from "mssql";
import { gpSqlConfig } from "../configs/gpSql.config.js";
import { createLogger } from "../logger/logger.controller.js";
import { checkHostnamePort } from "../utils/checkHost.utils.js";
import { returnFunc } from "../utils/returnHelper.utils.js";

// Module-level connection state, shared with gpSqlQuery.controller.ts.
// pool2 — the active GP connection pool; undefined until connectGPSql/
// reconnectToSql succeeds, so callers must check `connected` first.
export let pool2: sql.ConnectionPool;
// connected — true while pool2 holds a live connection.
export let connected: boolean = false;
// reconnecting — guard ensuring only one reconnect retry loop runs at a time.
export let reconnecting = false;
|
||||
|
||||
export const connectGPSql = async () => {
|
||||
const serverUp = await checkHostnamePort(`USMCD1VMS011:1433`);
|
||||
if (!serverUp) {
|
||||
// we will try to reconnect
|
||||
connected = false;
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "GP server is offline or unreachable.",
|
||||
});
|
||||
}
|
||||
|
||||
// if we are trying to click restart from the api for some reason we want to kick back and say no
|
||||
if (connected) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "The Sql server is already connected.",
|
||||
});
|
||||
}
|
||||
|
||||
// try to connect to the sql server
|
||||
try {
|
||||
pool2 = new sql.ConnectionPool(gpSqlConfig);
|
||||
await pool2.connect();
|
||||
connected = true;
|
||||
return returnFunc({
|
||||
success: true,
|
||||
level: "info",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: `${gpSqlConfig.server} is connected to ${gpSqlConfig.database}`,
|
||||
data: [],
|
||||
notify: false,
|
||||
});
|
||||
} catch (error) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "Failed to connect to the prod sql server.",
|
||||
data: [error],
|
||||
notify: false,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const closePool = async () => {
|
||||
if (!connected) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "There is no connection to the prod server currently.",
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
await pool2.close();
|
||||
connected = false;
|
||||
return returnFunc({
|
||||
success: true,
|
||||
level: "info",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "The sql connection has been closed.",
|
||||
});
|
||||
} catch (error) {
|
||||
connected = false;
|
||||
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "There was an error closing the sql connection",
|
||||
data: [error],
|
||||
});
|
||||
}
|
||||
};
|
||||
export const reconnectToSql = async () => {
|
||||
const log = createLogger({
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
});
|
||||
if (reconnecting) return;
|
||||
|
||||
//set reconnecting to true while we try to reconnect
|
||||
reconnecting = true;
|
||||
|
||||
// start the delay out as 2 seconds
|
||||
let delayStart = 2000;
|
||||
let attempt = 0;
|
||||
const maxAttempts = 10;
|
||||
|
||||
while (!connected && attempt < maxAttempts) {
|
||||
attempt++;
|
||||
log.info(
|
||||
`Reconnect attempt ${attempt}/${maxAttempts} in ${delayStart / 1000}s ...`,
|
||||
);
|
||||
|
||||
await new Promise((res) => setTimeout(res, delayStart));
|
||||
|
||||
const serverUp = await checkHostnamePort(`${process.env.PROD_SERVER}:1433`);
|
||||
|
||||
if (!serverUp) {
|
||||
delayStart = Math.min(delayStart * 2, 30000); // exponential backoff until up to 30000
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
pool2 = await sql.connect(gpSqlConfig);
|
||||
reconnecting = false;
|
||||
connected = true;
|
||||
log.info(`${gpSqlConfig.server} is connected to ${gpSqlConfig.database}`);
|
||||
} catch (error) {
|
||||
delayStart = Math.min(delayStart * 2, 30000);
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "Failed to reconnect to the prod sql server.",
|
||||
data: [error],
|
||||
notify: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!connected) {
|
||||
log.error(
|
||||
{ notify: true },
|
||||
"Max reconnect attempts reached on the prodSql server. Stopping retries.",
|
||||
);
|
||||
|
||||
reconnecting = false;
|
||||
// TODO: exit alert someone here
|
||||
}
|
||||
};
|
||||
97
backend/gpSql/gpSqlQuery.controller.ts
Normal file
97
backend/gpSql/gpSqlQuery.controller.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
import {
  connected,
  pool2,
  reconnecting,
  reconnectToSql,
} from "./gpSqlConnection.controller.js";

// Shape of errors thrown by the mssql driver as observed in gpQuery's
// catch block: `code` distinguishes timeouts ("ETIMEOUT") from request
// errors ("EREQUEST"), and SQL Server's own text rides along on
// originalError.info.message.
interface SqlError extends Error {
  code?: string;
  originalError?: {
    info?: { message?: string };
  };
}
|
||||
|
||||
/**
 * Run a prod query
 * just pass over the query as a string and the name of the query.
 * Query should be like below.
 * * select * from AlplaPROD_test1.dbo.table
 * You must use test1 always as it will be changed via query
 *
 * Returns `{ success: true, message, data }` with the recordset on success,
 * or a returnFunc error result (reconnect-in-progress, timeout, request
 * error, or unknown) on failure.
 */
export const gpQuery = async (queryToRun: string, name: string) => {
  if (!connected) {
    // Fire-and-forget: reconnectToSql() flips `reconnecting` to true
    // synchronously (before its first await), so the check just below
    // observes it. NOTE(review): because of that, the `else` branch is
    // reachable only right after a retry cycle has exhausted its attempts
    // (which resets the flag) — confirm this is the intended behavior.
    reconnectToSql();

    if (reconnecting) {
      return returnFunc({
        success: false,
        level: "error",
        module: "system",
        subModule: "gpSql",
        message: `The sql ${process.env.PROD_PLANT_TOKEN} is trying to reconnect already`,
        data: [],
        notify: false,
      });
    } else {
      return returnFunc({
        success: false,
        level: "error",
        module: "system",
        subModule: "gpSql",
        message: `${process.env.PROD_PLANT_TOKEN} is not connected, and failed to connect.`,
        data: [],
        notify: true,
      });
    }
  }

  // change to the correct server: every occurrence of the literal "test1"
  // is swapped for the plant token. NOTE(review): this also rewrites any
  // table/column name that happens to contain "test1" — verify callers
  // never use that substring elsewhere.
  const query = queryToRun.replaceAll(
    "test1",
    `${process.env.PROD_PLANT_TOKEN}`,
  );

  try {
    const result = await pool2.request().query(query);
    return {
      success: true,
      message: `Query results for: ${name}`,
      data: result.recordset ?? [],
    };
  } catch (error: unknown) {
    const err = error as SqlError;
    // Timed out (requestTimeout elapsed before the query finished).
    if (err.code === "ETIMEOUT") {
      return returnFunc({
        success: false,
        module: "system",
        subModule: "gpSql",
        level: "error",
        message: `${name} did not run due to a timeout.`,
        notify: false,
        data: [],
      });
    }

    // SQL Server rejected the request — surface its own error message.
    if (err.code === "EREQUEST") {
      return returnFunc({
        success: false,
        module: "system",
        subModule: "gpSql",
        level: "error",
        message: `${name} encountered an error ${err.originalError?.info?.message || "undefined error"}`,
        data: [],
      });
    }

    // Anything else: driver/network failure with no recognized code.
    return returnFunc({
      success: false,
      module: "system",
      subModule: "gpSql",
      level: "error",
      message: `${name} encountered an unknown error.`,
      data: [],
    });
  }
};
|
||||
29
backend/gpSql/gpSqlQuerySelector.utils.ts
Normal file
29
backend/gpSql/gpSqlQuerySelector.utils.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
|
||||
export type SqlGPQuery = {
|
||||
query: string;
|
||||
success: boolean;
|
||||
message: string;
|
||||
};
|
||||
|
||||
export const sqlGpQuerySelector = (name: string) => {
|
||||
try {
|
||||
const queryFile = readFileSync(
|
||||
new URL(`../gpSql/queries/${name}.sql`, import.meta.url),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Query for: ${name}`,
|
||||
query: queryFile,
|
||||
};
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
"Error getting the query file, please make sure you have the correct name.",
|
||||
};
|
||||
}
|
||||
};
|
||||
23
backend/gpSql/gpSqlRestart.route.ts
Normal file
23
backend/gpSql/gpSqlRestart.route.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { Router } from "express";
|
||||
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||
import { closePool, connectGPSql } from "./gpSqlConnection.controller.js";
|
||||
|
||||
const r = Router();
|
||||
|
||||
r.post("/restart", async (_, res) => {
|
||||
await closePool();
|
||||
|
||||
await new Promise((r) => setTimeout(r, 2000));
|
||||
|
||||
const connect = await connectGPSql();
|
||||
apiReturn(res, {
|
||||
success: connect.success,
|
||||
level: connect.success ? "info" : "error",
|
||||
module: "routes",
|
||||
subModule: "prodSql",
|
||||
message: "Sql Server has been restarted",
|
||||
data: connect.data,
|
||||
status: connect.success ? 200 : 400,
|
||||
});
|
||||
});
|
||||
export default r;
|
||||
20
backend/gpSql/gpSqlStart.route.ts
Normal file
20
backend/gpSql/gpSqlStart.route.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { Router } from "express";
|
||||
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||
import { connectGPSql } from "./gpSqlConnection.controller.js";
|
||||
|
||||
const r = Router();
|
||||
|
||||
r.post("/start", async (_, res) => {
|
||||
const connect = await connectGPSql();
|
||||
apiReturn(res, {
|
||||
success: connect.success,
|
||||
level: connect.success ? "info" : "error",
|
||||
module: "routes",
|
||||
subModule: "prodSql",
|
||||
message: connect.message,
|
||||
data: connect.data,
|
||||
status: connect.success ? 200 : 400,
|
||||
});
|
||||
});
|
||||
|
||||
export default r;
|
||||
20
backend/gpSql/gpSqlStop.route.ts
Normal file
20
backend/gpSql/gpSqlStop.route.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { Router } from "express";
|
||||
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||
import { closePool } from "./gpSqlConnection.controller.js";
|
||||
|
||||
const r = Router();
|
||||
|
||||
r.post("/stop", async (_, res) => {
|
||||
const connect = await closePool();
|
||||
apiReturn(res, {
|
||||
success: connect.success,
|
||||
level: connect.success ? "info" : "error",
|
||||
module: "routes",
|
||||
subModule: "prodSql",
|
||||
message: connect.message,
|
||||
data: connect.data,
|
||||
status: connect.success ? 200 : 400,
|
||||
});
|
||||
});
|
||||
|
||||
export default r;
|
||||
39
backend/gpSql/queries/reqCheck.sql
Normal file
39
backend/gpSql/queries/reqCheck.sql
Normal file
@@ -0,0 +1,39 @@
|
||||
USE [ALPLA]

-- Requisition status lookup used by gpReqCheck (puchase.gpCheck.ts).
-- Joins PORequisitions (r) to PurchaseRequisitions (t) and derives a
-- consumer-facing approvedStatus from the workflow/approval state.
-- [reqsToCheck] is a placeholder replaced at runtime with a quoted,
-- comma-separated list of requisition numbers.
SELECT Distinct r.[POPRequisitionNumber] as req,
r.[ApprovalStatus] as approvalStatus,
r.[Requested By] requestedBy,
format(t.[Created Date], 'yyyy-MM-dd') as createdAt,
format(r.[Requisition Date], 'MM/dd/yyyy') as expectedDate,
-- NOTE(review): [Requisition Amount] aliased as glAccount looks like a
-- mismatched alias — confirm against the consuming code.
r.[Requisition Amount] as glAccount,
-- Plant from account segment 2; '999' is the fallback for missing/blank.
case when r.[Account Segment 2] is null or r.[Account Segment 2] = '' then '999' else cast(r.[Account Segment 2] as varchar) end as plant
,t.Status as status
,t.[Document Status] as docStatus
,t.[Workflow Status] as reqState
-- Derived status. NOTE(review): the literal strings below (including the
-- 'convertion'/'submited' spellings) may be matched exactly downstream —
-- do not "fix" them without checking consumers.
,CASE
WHEN [Workflow Status] = 'Completed'
THEN 'Pending APO convertion'
WHEN [Workflow Status] = 'Pending User Action'
AND r.[ApprovalStatus] = 'Pending Approval'
THEN 'Pending plant approver'
WHEN [Workflow Status] = ''
AND r.[ApprovalStatus] = 'Not Submitted'
THEN 'Req not submited'
ELSE 'Unknown reason'
END AS approvedStatus

FROM [dbo].[PORequisitions] r (nolock)

left join
[dbo].[PurchaseRequisitions] as t (nolock) on
t.[Requisition Number] = r.[POPRequisitionNumber]

--where ApprovalStatus = 'Pending Approval'
--and [Account Segment 2] = 80

where r.POPRequisitionNumber in ([reqsToCheck])

Order By r.POPRequisitionNumber
||||
@@ -90,13 +90,22 @@ const reprint = async (data: any, emails: string) => {
|
||||
});
|
||||
|
||||
if (!sentEmail?.success) {
|
||||
// sendEmail({
|
||||
// email: "Blake.matths@alpla.com",
|
||||
// subject: `${os.hostname()} failed to run ${data[0]?.name}.`,
|
||||
// template: "serverCrash",
|
||||
// context: {
|
||||
// error: sentEmail?.data,
|
||||
// plant: `${os.hostname()}`,
|
||||
// },
|
||||
// });
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "email",
|
||||
subModule: "notification",
|
||||
module: "notification",
|
||||
subModule: "email",
|
||||
message: `${l[0].name} failed to send the email`,
|
||||
data: [sentEmail],
|
||||
data: [sentEmail?.data],
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
|
||||
117
backend/purchase/puchase.gpCheck.ts
Normal file
117
backend/purchase/puchase.gpCheck.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import { gpQuery } from "../gpSql/gpSqlQuery.controller.js";
|
||||
import {
|
||||
type SqlGPQuery,
|
||||
sqlGpQuerySelector,
|
||||
} from "../gpSql/gpSqlQuerySelector.utils.js";
|
||||
import { createLogger } from "../logger/logger.controller.js";
|
||||
import type { GpStatus } from "../types/purhcaseTypes.js";
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||
|
||||
const log = createLogger({ module: "purchase", subModule: "gp" });
|
||||
|
||||
export const gpReqCheck = async (data: GpStatus[]) => {
|
||||
const gpReqCheck = sqlGpQuerySelector("reqCheck") as SqlGPQuery;
|
||||
const reqs = data.map((r) => r.req.trim());
|
||||
|
||||
if (!gpReqCheck.success) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "purchase",
|
||||
subModule: "query",
|
||||
message: `Error getting alpla purchase info`,
|
||||
data: [gpReqCheck.message],
|
||||
notify: false,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
// check the initial req table
|
||||
const result = await gpQuery(
|
||||
gpReqCheck.query.replace(
|
||||
"[reqsToCheck]",
|
||||
data.map((r) => `'${r.req}'`).join(", ") ?? "",
|
||||
),
|
||||
"Get req info",
|
||||
);
|
||||
|
||||
log.debug(
|
||||
{},
|
||||
`There are ${result.data.length} reqs that need to be updated with there current status`,
|
||||
);
|
||||
|
||||
const firstFound = result.data.map((r) => ({
|
||||
req: r.req.trim(),
|
||||
approvedStatus: r.approvedStatus,
|
||||
}));
|
||||
|
||||
const firstFoundSet = new Set(result.data.map((r) => r.req.trim()));
|
||||
|
||||
const missing1Reqs = reqs.filter((req) => !firstFoundSet.has(req));
|
||||
|
||||
//check if we have a recall on our req
|
||||
const reqCheck = await gpQuery(
|
||||
`select
|
||||
[Requisition Number] as req
|
||||
,case when [Workflow Status] = 'recall' then 'returned' else [Workflow Status] end as approvedStatus
|
||||
--,*
|
||||
from [dbo].[PurchaseRequisitions] where [Requisition Number] in (${missing1Reqs.map((r) => `'${r}'`).join(", ")})`,
|
||||
"validate req is not in recall",
|
||||
);
|
||||
|
||||
const secondFound = reqCheck.data.map((r) => ({
|
||||
req: r.req.trim(),
|
||||
approvedStatus: r.approvedStatus,
|
||||
}));
|
||||
|
||||
const secondFoundSet =
|
||||
new Set(reqCheck.data.map((r) => r.req.trim())) ?? [];
|
||||
|
||||
const missing2Reqs = missing1Reqs.filter((req) => !secondFoundSet.has(req));
|
||||
|
||||
// check if we have a po already
|
||||
const apoCheck = await gpQuery(
|
||||
`select
|
||||
SOPNUMBE
|
||||
,PONUMBER
|
||||
,reqStatus='converted'
|
||||
,*
|
||||
from alpla.dbo.sop60100 (nolock) where sopnumbe in (${missing2Reqs.map((r) => `'${r}'`).join(", ")})`,
|
||||
"Get release info",
|
||||
);
|
||||
|
||||
const thirdRound = apoCheck.data.map((r) => ({
|
||||
req: r.req.trim(),
|
||||
approvedStatus: r.approvedStatus,
|
||||
}));
|
||||
|
||||
const missing3Reqs = missing2Reqs.filter((req) => !secondFoundSet.has(req));
|
||||
|
||||
// remaining just got canceled or no longer exist
|
||||
const remaining = missing3Reqs.map((m) => ({
|
||||
req: m,
|
||||
approvedStatus: "canceled",
|
||||
}));
|
||||
|
||||
const allFound = [
|
||||
...firstFound,
|
||||
...secondFound,
|
||||
...thirdRound,
|
||||
...remaining,
|
||||
];
|
||||
|
||||
const statusMap = new Map(
|
||||
allFound.map((r: any) => [r.req, r.approvedStatus]),
|
||||
);
|
||||
|
||||
const updateData = data.map((row) => ({
|
||||
id: row.id,
|
||||
//req: row.req,
|
||||
approvedStatus: statusMap.get(row.req.trim()) ?? null,
|
||||
}));
|
||||
|
||||
return updateData;
|
||||
} catch (error) {
|
||||
log.error({ stack: error });
|
||||
}
|
||||
};
|
||||
@@ -2,7 +2,7 @@
|
||||
* This will monitor alpla purchase
|
||||
*/
|
||||
|
||||
import { eq } from "drizzle-orm";
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../db/db.controller.js";
|
||||
import {
|
||||
alplaPurchaseHistory,
|
||||
@@ -15,10 +15,12 @@ import {
|
||||
type SqlQuery,
|
||||
sqlQuerySelector,
|
||||
} from "../prodSql/prodSqlQuerySelector.utils.js";
|
||||
import type { GpStatus, StatusUpdate } from "../types/purhcaseTypes.js";
|
||||
import { createCronJob } from "../utils/croner.utils.js";
|
||||
import { delay } from "../utils/delay.utils.js";
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||
import { gpReqCheck } from "./puchase.gpCheck.js";
|
||||
|
||||
const log = createLogger({ module: "purchase", subModule: "purchaseMonitor" });
|
||||
|
||||
@@ -84,14 +86,145 @@ export const monitorAlplaPurchase = async () => {
|
||||
await delay(500);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(
|
||||
{ error: e },
|
||||
"Error occurred while running the monitor job",
|
||||
);
|
||||
log.error({ error: e }, "Error occurred while running the monitor job");
|
||||
log.error({ error: e }, "Error occurred while running the monitor job");
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// re-pull re-pull everything that has approvedStatus is pending
|
||||
|
||||
const { data: allReq, error: errorReq } = await tryCatch(
|
||||
db
|
||||
.select()
|
||||
.from(alplaPurchaseHistory)
|
||||
.where(eq(alplaPurchaseHistory.approvedStatus, "new")),
|
||||
);
|
||||
|
||||
// if theres no reqs just end meow
|
||||
if (errorReq) {
|
||||
log.error(
|
||||
{ stack: errorReq },
|
||||
"There was an error getting history data",
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
log.debug({}, `There are ${allReq.length} pending reqs to be updated`);
|
||||
|
||||
if (!allReq.length) {
|
||||
log.debug({}, "There are not reqs to be processed");
|
||||
return;
|
||||
}
|
||||
/**
|
||||
* approvedStatus
|
||||
* remark = '' then pending req/manual po
|
||||
* pending = pending
|
||||
* approved = approved
|
||||
*
|
||||
*/
|
||||
|
||||
// the flow for all the fun stuff
|
||||
|
||||
const needsGpLookup: GpStatus[] = [];
|
||||
const updates: StatusUpdate[] = [];
|
||||
|
||||
for (const row of allReq ?? []) {
|
||||
const remark = row.remark?.toLowerCase() ?? "";
|
||||
|
||||
if (remark === "") {
|
||||
updates.push({ id: row.id, approvedStatus: "initial" });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (remark.includes("rct")) {
|
||||
updates.push({ id: row.id, approvedStatus: "received" });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (remark.includes("apo")) {
|
||||
updates.push({ id: row.id, approvedStatus: "approved" });
|
||||
continue;
|
||||
}
|
||||
|
||||
// not handled locally, defer to GP lookup
|
||||
needsGpLookup.push({ id: row.id, req: row.remark?.trim() ?? "" });
|
||||
}
|
||||
|
||||
const gpSmash = (await gpReqCheck(needsGpLookup)) as StatusUpdate[];
|
||||
|
||||
const merge = [...updates, ...gpSmash];
|
||||
|
||||
if (merge.length > 0) {
|
||||
await db.execute(sql`
|
||||
UPDATE ${alplaPurchaseHistory}
|
||||
SET approved_status = CASE
|
||||
${sql.join(
|
||||
merge.map(
|
||||
(row) =>
|
||||
sql`WHEN ${alplaPurchaseHistory.id} = ${row.id} THEN ${row.approvedStatus}`,
|
||||
),
|
||||
sql` `,
|
||||
)}
|
||||
ELSE approved_status
|
||||
END,
|
||||
updated_at = NOW()
|
||||
WHERE ${alplaPurchaseHistory.id} IN (
|
||||
${sql.join(
|
||||
merge.map((row) => sql`${row.id}`),
|
||||
sql`, `,
|
||||
)}
|
||||
)
|
||||
`);
|
||||
log.info(
|
||||
{},
|
||||
"All alpla purchase orders have been processed and updated",
|
||||
);
|
||||
}
|
||||
|
||||
// for reqs, create a string of reqs then run them through the gp req table to see there status. then update in lst ass see fit.
|
||||
|
||||
// then double check if we have all reqs covered, for the reqs missing from above restring them and check the po table
|
||||
|
||||
// these ones will be called to as converted to po
|
||||
|
||||
// for the remaining reqs from above check the actual req table to see the status of it if the workflow is set at Recall this means a change was requested from purchasing team and needs to be re approved
|
||||
|
||||
// for all remaining reqs we change them to replace/canceled
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// const updates = (allReq ?? [])
|
||||
// .map((row) => {
|
||||
// const remark = row.remark?.toLowerCase() ?? "";
|
||||
|
||||
// let approvedStatus: string | null = null;
|
||||
|
||||
// // priority order matters here
|
||||
// if (remark === "") {
|
||||
// approvedStatus = "initial";
|
||||
// } else if (remark.includes("rct")) {
|
||||
// approvedStatus = "received";
|
||||
// } else if (remark.includes("apo")) {
|
||||
// approvedStatus = "approved";
|
||||
// }
|
||||
|
||||
// // add your next 4 checks here
|
||||
// // else if (...) approvedStatus = "somethingElse";
|
||||
|
||||
// if (!approvedStatus) return null;
|
||||
|
||||
// return {
|
||||
// id: row.id,
|
||||
// approvedStatus,
|
||||
// };
|
||||
// })
|
||||
// .filter(
|
||||
// (
|
||||
// row,
|
||||
// ): row is {
|
||||
// id: string;
|
||||
// approvedStatus: string;
|
||||
// } => row !== null,
|
||||
// );
|
||||
|
||||
@@ -4,6 +4,7 @@ import { setupAuthRoutes } from "./auth/auth.routes.js";
|
||||
// import the routes and route setups
|
||||
import { setupApiDocsRoutes } from "./configs/scaler.config.js";
|
||||
import { setupDatamartRoutes } from "./datamart/datamart.routes.js";
|
||||
import { setupGPSqlRoutes } from "./gpSql/gpSql.routes.js";
|
||||
import { setupNotificationRoutes } from "./notification/notification.routes.js";
|
||||
import { setupOCPRoutes } from "./ocp/ocp.routes.js";
|
||||
import { setupOpendockRoutes } from "./opendock/opendock.routes.js";
|
||||
@@ -16,6 +17,7 @@ export const setupRoutes = (baseUrl: string, app: Express) => {
|
||||
setupSystemRoutes(baseUrl, app);
|
||||
setupApiDocsRoutes(baseUrl, app);
|
||||
setupProdSqlRoutes(baseUrl, app);
|
||||
setupGPSqlRoutes(baseUrl, app);
|
||||
setupDatamartRoutes(baseUrl, app);
|
||||
setupAuthRoutes(baseUrl, app);
|
||||
setupUtilsRoutes(baseUrl, app);
|
||||
|
||||
@@ -4,6 +4,7 @@ import createApp from "./app.js";
|
||||
import { db } from "./db/db.controller.js";
|
||||
import { dbCleanup } from "./db/dbCleanup.controller.js";
|
||||
import { type Setting, settings } from "./db/schema/settings.schema.js";
|
||||
import { connectGPSql } from "./gpSql/gpSqlConnection.controller.js";
|
||||
import { createLogger } from "./logger/logger.controller.js";
|
||||
import { startNotifications } from "./notification/notification.controller.js";
|
||||
import { createNotifications } from "./notification/notifications.master.js";
|
||||
@@ -28,6 +29,7 @@ const start = async () => {
|
||||
|
||||
// triggering long lived processes
|
||||
connectProdSql();
|
||||
connectGPSql();
|
||||
|
||||
// trigger startup processes these must run before anything else can run
|
||||
await baseSettingValidationCheck();
|
||||
|
||||
9
backend/types/purhcaseTypes.ts
Normal file
9
backend/types/purhcaseTypes.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
// A purchase-history row whose requisition status must be resolved against GP.
export type GpStatus = {
  id: string; // local alpla_purchase_history row id
  req: string; // GP requisition number (callers trim it before comparison)
};

// Resolved status to write back for a row.
export type StatusUpdate = {
  id: string;
  approvedStatus: string; // e.g. "approved", "received", "converted", "canceled"
};
|
||||
@@ -31,7 +31,9 @@ interface Data<T = unknown[]> {
|
||||
| "post"
|
||||
| "notification"
|
||||
| "delete"
|
||||
| "printing";
|
||||
| "printing"
|
||||
| "gpSql"
|
||||
| "email";
|
||||
level: "info" | "error" | "debug" | "fatal";
|
||||
message: string;
|
||||
room?: string;
|
||||
@@ -63,13 +65,14 @@ export const returnFunc = (data: Data) => {
|
||||
log.info({ notify: notify, room }, data.message);
|
||||
break;
|
||||
case "error":
|
||||
log.error({ notify: notify, error: data.data, room }, data.message);
|
||||
log.error({ notify: notify, stack: data.data ?? [], room }, data.message);
|
||||
|
||||
break;
|
||||
case "debug":
|
||||
log.debug({ notify: notify, room }, data.message);
|
||||
log.debug({ notify: notify, stack: data.data ?? [], room }, data.message);
|
||||
break;
|
||||
case "fatal":
|
||||
log.fatal({ notify: notify, room }, data.message);
|
||||
log.fatal({ notify: notify, stack: data.data ?? [], room }, data.message);
|
||||
}
|
||||
|
||||
// api section to return
|
||||
|
||||
@@ -88,7 +88,7 @@ export const sendEmail = async (data: EmailData) => {
|
||||
level: "error",
|
||||
module: "utils",
|
||||
subModule: "sendmail",
|
||||
message: `Error sending Email to : ${data.email}`,
|
||||
message: `Error sending Email to : ${data.email}, Error: ${error.message}`,
|
||||
data: [{ error: error }],
|
||||
notify: false,
|
||||
});
|
||||
|
||||
1
migrations/0025_talented_vector.sql
Normal file
1
migrations/0025_talented_vector.sql
Normal file
@@ -0,0 +1 @@
|
||||
-- Track last-modified time on purchase rows (set by the monitor job's bulk update).
ALTER TABLE "alpla_purchase_history" ADD COLUMN "updated_at" timestamp DEFAULT now();
|
||||
1
migrations/0026_vengeful_wiccan.sql
Normal file
1
migrations/0026_vengeful_wiccan.sql
Normal file
@@ -0,0 +1 @@
|
||||
-- New rows start as 'new' (was 'pending') so the monitor picks them up for GP lookup.
ALTER TABLE "alpla_purchase_history" ALTER COLUMN "approved_status" SET DEFAULT 'new';
|
||||
1474
migrations/meta/0025_snapshot.json
Normal file
1474
migrations/meta/0025_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1474
migrations/meta/0026_snapshot.json
Normal file
1474
migrations/meta/0026_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -176,6 +176,20 @@
|
||||
"when": 1775661516749,
|
||||
"tag": "0024_absent_barracuda",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 25,
|
||||
"version": "7",
|
||||
"when": 1775755338816,
|
||||
"tag": "0025_talented_vector",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 26,
|
||||
"version": "7",
|
||||
"when": 1775786221817,
|
||||
"tag": "0026_vengeful_wiccan",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
19
package-lock.json
generated
19
package-lock.json
generated
@@ -24,6 +24,7 @@
|
||||
"drizzle-zod": "^0.8.3",
|
||||
"express": "^5.2.1",
|
||||
"husky": "^9.1.7",
|
||||
"ldapts": "^8.1.7",
|
||||
"morgan": "^1.10.1",
|
||||
"mssql": "^12.2.1",
|
||||
"multer": "^2.1.1",
|
||||
@@ -8063,6 +8064,18 @@
|
||||
"node": ">=20.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ldapts": {
|
||||
"version": "8.1.7",
|
||||
"resolved": "https://registry.npmjs.org/ldapts/-/ldapts-8.1.7.tgz",
|
||||
"integrity": "sha512-TJl6T92eIwMf/OJ0hDfKVa6ISwzo+lqCWCI5Mf//ARlKa3LKQZaSrme/H2rCRBhW0DZCQlrsV+fgoW5YHRNLUw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"strict-event-emitter-types": "2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
}
|
||||
},
|
||||
"node_modules/lines-and-columns": {
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
|
||||
@@ -10422,6 +10435,12 @@
|
||||
"node": ">=10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/strict-event-emitter-types": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strict-event-emitter-types/-/strict-event-emitter-types-2.0.0.tgz",
|
||||
"integrity": "sha512-Nk/brWYpD85WlOgzw5h173aci0Teyv8YdIAEtV+N88nDB0dLlazZyJMIsN6eo1/AR61l+p6CJTG1JIyFaoNEEA==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/string_decoder": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||
|
||||
@@ -10,10 +10,12 @@
|
||||
"dev:frontend": "cd frontend && npm run dev",
|
||||
"dev:db:migrate": "npx drizzle-kit push",
|
||||
"dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle.config.ts",
|
||||
"build": "rimraf dist && npm run dev:db:generate && npm run dev:db:migrate && npm run build:app && npm run build:copySql && cd frontend && npm run build",
|
||||
"build": "rimraf dist && npm run dev:db:generate && npm run dev:db:migrate && npm run build:app && npm run build:copySql && npm run build:copyGpSql && npm run build:emailTemplate && cd frontend && npm run build",
|
||||
"build:app": "tsc",
|
||||
"agent": "powershell -ExecutionPolicy Bypass -File scripts/agentController.ps1",
|
||||
"build:docker": "rimraf dist && npm run build:app && npm run build:copySql",
|
||||
"build:docker": "rimraf dist && npm run build:app && npm run build:copySql && npm run build:copyGpSql && npm run build:emailTemplate",
|
||||
"build:emailTemplate": "cpy \"backend/utils/mailViews/**/*\" dist/utils/mailViews --parents",
|
||||
"build:copyGpSql": "cpy \"backend/gpSql/queries/**/*\" dist/gpSql/queries --parents",
|
||||
"build:copySql": "cpy \"backend/prodSql/queries/**/*\" dist/prodSql/queries --parents",
|
||||
"lint": "tsc && biome lint",
|
||||
"start": "npm run start:server",
|
||||
@@ -75,6 +77,7 @@
|
||||
"drizzle-zod": "^0.8.3",
|
||||
"express": "^5.2.1",
|
||||
"husky": "^9.1.7",
|
||||
"ldapts": "^8.1.7",
|
||||
"morgan": "^1.10.1",
|
||||
"mssql": "^12.2.1",
|
||||
"multer": "^2.1.1",
|
||||
|
||||
Reference in New Issue
Block a user