Compare commits
17 Commits
v0.0.1-alp
...
87f738702a
| Author | SHA1 | Date | |
|---|---|---|---|
| 87f738702a | |||
| 38a0b65e94 | |||
| 9a0ef8e51a | |||
| dcb3f2dd13 | |||
| e47ea9ec52 | |||
| ca3425d327 | |||
| 3bf024cfc9 | |||
| 9d39c13510 | |||
| c9eb59e2ad | |||
| b0e5fd7999 | |||
| 07ebf88806 | |||
| 79e653efa3 | |||
| d05a0ce930 | |||
| 995b1dda7c | |||
| 97f93a1830 | |||
| 635635b356 | |||
| a691dc276e |
57
.env-example
57
.env-example
@@ -1,32 +1,51 @@
|
||||
NODE_ENV=development
|
||||
# Server
|
||||
PORT=3000
|
||||
URL=http://localhost:3000
|
||||
TIMEZONE=America/New_York
|
||||
TCP_PORT=2222
|
||||
|
||||
# authentication
|
||||
BETTER_AUTH_SECRET=""
|
||||
# Better auth Secret
|
||||
BETTER_AUTH_SECRET=
|
||||
RESET_EXPIRY_SECONDS=3600
|
||||
|
||||
# logging
|
||||
LOG_LEVEL=debug
|
||||
LOG_LEVEL=
|
||||
|
||||
# prodServer
|
||||
PROD_SERVER=usmcd1vms036
|
||||
PROD_PLANT_TOKEN=test3
|
||||
PROD_USER=alplaprod
|
||||
PROD_PASSWORD=password
|
||||
# SMTP password
|
||||
SMTP_PASSWORD=
|
||||
|
||||
# opendock
|
||||
OPENDOCK_URL=https://neutron.opendock.com
|
||||
OPENDOCK_PASSWORD=
|
||||
DEFAULT_DOCK=
|
||||
DEFAULT_LOAD_TYPE=
|
||||
DEFAULT_CARRIER=
|
||||
|
||||
# prodServer when runing on an actual prod server use localhost this way we dont go out and back in.
|
||||
PROD_SERVER=
|
||||
PROD_PLANT_TOKEN=
|
||||
PROD_USER=
|
||||
PROD_PASSWORD=
|
||||
|
||||
# Tech user for alplaprod api
|
||||
TEC_API_KEY=
|
||||
|
||||
# AD STUFF
|
||||
# this is mainly used for purchase stuff to reference reqs
|
||||
LDAP_URL=
|
||||
|
||||
# postgres connection
|
||||
DATABASE_HOST=localhost
|
||||
DATABASE_PORT=5433
|
||||
DATABASE_USER=user
|
||||
DATABASE_PASSWORD=password
|
||||
DATABASE_DB=lst_dev
|
||||
DATABASE_PORT=5432
|
||||
DATABASE_USER=
|
||||
DATABASE_PASSWORD=
|
||||
DATABASE_DB=
|
||||
|
||||
# how is the app running server or client when in client mode you must provide the server
|
||||
APP_RUNNING_IN=server
|
||||
SERVER_NAME=localhost
|
||||
# Gp connection
|
||||
GP_USER=
|
||||
GP_PASSWORD=
|
||||
|
||||
#dev stuff
|
||||
GITEA_TOKEN=""
|
||||
EMAIL_USER=""
|
||||
EMAIL_PASSWORD=""
|
||||
# how often to check for new/updated queries in min
|
||||
QUERY_TIME_TYPE=m #valid options are m, h
|
||||
QUERY_CHECK=1
|
||||
|
||||
1
.vscode/settings.json
vendored
1
.vscode/settings.json
vendored
@@ -65,6 +65,7 @@
|
||||
"onnotice",
|
||||
"opendock",
|
||||
"opendocks",
|
||||
"palletizer",
|
||||
"ppoo",
|
||||
"preseed",
|
||||
"prodlabels",
|
||||
|
||||
14
CHANGELOG.md
14
CHANGELOG.md
@@ -1,5 +1,19 @@
|
||||
# All Changes to LST can be found below.
|
||||
|
||||
## [0.0.1-alpha.3](https://git.tuffraid.net/cowch/lst_v3/compare/v0.0.1-alpha.2...v0.0.1-alpha.3) (2026-04-10)
|
||||
|
||||
|
||||
### 🌟 Enhancements
|
||||
|
||||
* **puchase hist:** finished up purhcase historical / gp updates ([a691dc2](https://git.tuffraid.net/cowch/lst_v3/commits/a691dc276e8650c669409241f73d7b2d7a1f9176))
|
||||
|
||||
|
||||
### 🛠️ Code Refactor
|
||||
|
||||
* **gp connect:** gp connect as was added to long live services ([635635b](https://git.tuffraid.net/cowch/lst_v3/commits/635635b356e1262e1c0b063408fe2209e6a8d4ec))
|
||||
* **reprints:** changes the module and submodule around to be more accurate ([97f93a1](https://git.tuffraid.net/cowch/lst_v3/commits/97f93a1830761437118863372108df810ce9977a))
|
||||
* **send email:** changes the error message to show the true message in the error ([995b1dd](https://git.tuffraid.net/cowch/lst_v3/commits/995b1dda7cdfebf4367d301ccac38fd339fab6dd))
|
||||
|
||||
## [0.0.1-alpha.2](https://git.tuffraid.net/cowch/lst_v3/compare/v0.0.1-alpha.1...v0.0.1-alpha.2) (2026-04-08)
|
||||
|
||||
|
||||
|
||||
23
backend/configs/gpSql.config.ts
Normal file
23
backend/configs/gpSql.config.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import type sql from "mssql";
|
||||
|
||||
const username = "gpviewer";
|
||||
const password = "gp$$ViewOnly!";
|
||||
|
||||
export const gpSqlConfig: sql.config = {
|
||||
server: `USMCD1VMS011`,
|
||||
database: `ALPLA`,
|
||||
user: username,
|
||||
password: password,
|
||||
options: {
|
||||
encrypt: true,
|
||||
trustServerCertificate: true,
|
||||
},
|
||||
requestTimeout: 90000, // how long until we kill the query and fail it
|
||||
pool: {
|
||||
max: 20, // Maximum number of connections in the pool
|
||||
min: 0, // Minimum number of connections in the pool
|
||||
idleTimeoutMillis: 10000, // How long a connection is allowed to be idle before being released
|
||||
reapIntervalMillis: 1000, // how often to check for idle resources to destroy
|
||||
acquireTimeoutMillis: 100000, // How long until a complete timeout happens
|
||||
},
|
||||
};
|
||||
@@ -22,9 +22,10 @@ export const alplaPurchaseHistory = pgTable("alpla_purchase_history", {
|
||||
upd_user: text("upd_user"),
|
||||
upd_date: timestamp("upd_date").defaultNow(),
|
||||
remark: text("remark"),
|
||||
approvedStatus: text("approved_status").default("pending"),
|
||||
approvedStatus: text("approved_status").default("new"),
|
||||
position: jsonb("position").default([]),
|
||||
createdAt: timestamp("created_at").defaultNow(),
|
||||
updatedAt: timestamp("updated_at").defaultNow(),
|
||||
});
|
||||
|
||||
export const alplaPurchaseHistorySchema =
|
||||
|
||||
17
backend/gpSql/gpSql.routes.ts
Normal file
17
backend/gpSql/gpSql.routes.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { type Express, Router } from "express";
|
||||
import { requireAuth } from "../middleware/auth.middleware.js";
|
||||
import restart from "./gpSqlRestart.route.js";
|
||||
import start from "./gpSqlStart.route.js";
|
||||
import stop from "./gpSqlStop.route.js";
|
||||
export const setupGPSqlRoutes = (baseUrl: string, app: Express) => {
|
||||
//setup all the routes
|
||||
// Apply auth to entire router
|
||||
const router = Router();
|
||||
router.use(requireAuth);
|
||||
|
||||
router.use(start);
|
||||
router.use(stop);
|
||||
router.use(restart);
|
||||
|
||||
app.use(`${baseUrl}/api/system/gpSql`, router);
|
||||
};
|
||||
155
backend/gpSql/gpSqlConnection.controller.ts
Normal file
155
backend/gpSql/gpSqlConnection.controller.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
import sql from "mssql";
|
||||
import { gpSqlConfig } from "../configs/gpSql.config.js";
|
||||
import { createLogger } from "../logger/logger.controller.js";
|
||||
import { checkHostnamePort } from "../utils/checkHost.utils.js";
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||
|
||||
export let pool2: sql.ConnectionPool;
|
||||
export let connected: boolean = false;
|
||||
export let reconnecting = false;
|
||||
|
||||
export const connectGPSql = async () => {
|
||||
const serverUp = await checkHostnamePort(`USMCD1VMS011:1433`);
|
||||
if (!serverUp) {
|
||||
// we will try to reconnect
|
||||
connected = false;
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "GP server is offline or unreachable.",
|
||||
});
|
||||
}
|
||||
|
||||
// if we are trying to click restart from the api for some reason we want to kick back and say no
|
||||
if (connected) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "The Sql server is already connected.",
|
||||
});
|
||||
}
|
||||
|
||||
// try to connect to the sql server
|
||||
try {
|
||||
pool2 = new sql.ConnectionPool(gpSqlConfig);
|
||||
await pool2.connect();
|
||||
connected = true;
|
||||
return returnFunc({
|
||||
success: true,
|
||||
level: "info",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: `${gpSqlConfig.server} is connected to ${gpSqlConfig.database}`,
|
||||
data: [],
|
||||
notify: false,
|
||||
});
|
||||
} catch (error) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "Failed to connect to the prod sql server.",
|
||||
data: [error],
|
||||
notify: false,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const closePool = async () => {
|
||||
if (!connected) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "There is no connection to the prod server currently.",
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
await pool2.close();
|
||||
connected = false;
|
||||
return returnFunc({
|
||||
success: true,
|
||||
level: "info",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "The sql connection has been closed.",
|
||||
});
|
||||
} catch (error) {
|
||||
connected = false;
|
||||
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "There was an error closing the sql connection",
|
||||
data: [error],
|
||||
});
|
||||
}
|
||||
};
|
||||
export const reconnectToSql = async () => {
|
||||
const log = createLogger({
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
});
|
||||
if (reconnecting) return;
|
||||
|
||||
//set reconnecting to true while we try to reconnect
|
||||
reconnecting = true;
|
||||
|
||||
// start the delay out as 2 seconds
|
||||
let delayStart = 2000;
|
||||
let attempt = 0;
|
||||
const maxAttempts = 10;
|
||||
|
||||
while (!connected && attempt < maxAttempts) {
|
||||
attempt++;
|
||||
log.info(
|
||||
`Reconnect attempt ${attempt}/${maxAttempts} in ${delayStart / 1000}s ...`,
|
||||
);
|
||||
|
||||
await new Promise((res) => setTimeout(res, delayStart));
|
||||
|
||||
const serverUp = await checkHostnamePort(`${process.env.PROD_SERVER}:1433`);
|
||||
|
||||
if (!serverUp) {
|
||||
delayStart = Math.min(delayStart * 2, 30000); // exponential backoff until up to 30000
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
pool2 = await sql.connect(gpSqlConfig);
|
||||
reconnecting = false;
|
||||
connected = true;
|
||||
log.info(`${gpSqlConfig.server} is connected to ${gpSqlConfig.database}`);
|
||||
} catch (error) {
|
||||
delayStart = Math.min(delayStart * 2, 30000);
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "db",
|
||||
message: "Failed to reconnect to the prod sql server.",
|
||||
data: [error],
|
||||
notify: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!connected) {
|
||||
log.error(
|
||||
{ notify: true },
|
||||
"Max reconnect attempts reached on the prodSql server. Stopping retries.",
|
||||
);
|
||||
|
||||
reconnecting = false;
|
||||
// TODO: exit alert someone here
|
||||
}
|
||||
};
|
||||
97
backend/gpSql/gpSqlQuery.controller.ts
Normal file
97
backend/gpSql/gpSqlQuery.controller.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||
import {
|
||||
connected,
|
||||
pool2,
|
||||
reconnecting,
|
||||
reconnectToSql,
|
||||
} from "./gpSqlConnection.controller.js";
|
||||
|
||||
interface SqlError extends Error {
|
||||
code?: string;
|
||||
originalError?: {
|
||||
info?: { message?: string };
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Run a prod query
|
||||
* just pass over the query as a string and the name of the query.
|
||||
* Query should be like below.
|
||||
* * select * from AlplaPROD_test1.dbo.table
|
||||
* You must use test1 always as it will be changed via query
|
||||
*/
|
||||
export const gpQuery = async (queryToRun: string, name: string) => {
|
||||
if (!connected) {
|
||||
reconnectToSql();
|
||||
|
||||
if (reconnecting) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "gpSql",
|
||||
message: `The sql ${process.env.PROD_PLANT_TOKEN} is trying to reconnect already`,
|
||||
data: [],
|
||||
notify: false,
|
||||
});
|
||||
} else {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "system",
|
||||
subModule: "gpSql",
|
||||
message: `${process.env.PROD_PLANT_TOKEN} is not connected, and failed to connect.`,
|
||||
data: [],
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
//change to the correct server
|
||||
const query = queryToRun.replaceAll(
|
||||
"test1",
|
||||
`${process.env.PROD_PLANT_TOKEN}`,
|
||||
);
|
||||
|
||||
try {
|
||||
const result = await pool2.request().query(query);
|
||||
return {
|
||||
success: true,
|
||||
message: `Query results for: ${name}`,
|
||||
data: result.recordset ?? [],
|
||||
};
|
||||
} catch (error: unknown) {
|
||||
const err = error as SqlError;
|
||||
if (err.code === "ETIMEOUT") {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
module: "system",
|
||||
subModule: "gpSql",
|
||||
level: "error",
|
||||
message: `${name} did not run due to a timeout.`,
|
||||
notify: false,
|
||||
data: [],
|
||||
});
|
||||
}
|
||||
|
||||
if (err.code === "EREQUEST") {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
module: "system",
|
||||
subModule: "gpSql",
|
||||
level: "error",
|
||||
message: `${name} encountered an error ${err.originalError?.info?.message || "undefined error"}`,
|
||||
data: [],
|
||||
});
|
||||
}
|
||||
|
||||
return returnFunc({
|
||||
success: false,
|
||||
module: "system",
|
||||
subModule: "gpSql",
|
||||
level: "error",
|
||||
message: `${name} encountered an unknown error.`,
|
||||
data: [],
|
||||
});
|
||||
}
|
||||
};
|
||||
29
backend/gpSql/gpSqlQuerySelector.utils.ts
Normal file
29
backend/gpSql/gpSqlQuerySelector.utils.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
|
||||
export type SqlGPQuery = {
|
||||
query: string;
|
||||
success: boolean;
|
||||
message: string;
|
||||
};
|
||||
|
||||
export const sqlGpQuerySelector = (name: string) => {
|
||||
try {
|
||||
const queryFile = readFileSync(
|
||||
new URL(`../gpSql/queries/${name}.sql`, import.meta.url),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Query for: ${name}`,
|
||||
query: queryFile,
|
||||
};
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
"Error getting the query file, please make sure you have the correct name.",
|
||||
};
|
||||
}
|
||||
};
|
||||
23
backend/gpSql/gpSqlRestart.route.ts
Normal file
23
backend/gpSql/gpSqlRestart.route.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { Router } from "express";
|
||||
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||
import { closePool, connectGPSql } from "./gpSqlConnection.controller.js";
|
||||
|
||||
const r = Router();
|
||||
|
||||
r.post("/restart", async (_, res) => {
|
||||
await closePool();
|
||||
|
||||
await new Promise((r) => setTimeout(r, 2000));
|
||||
|
||||
const connect = await connectGPSql();
|
||||
apiReturn(res, {
|
||||
success: connect.success,
|
||||
level: connect.success ? "info" : "error",
|
||||
module: "routes",
|
||||
subModule: "prodSql",
|
||||
message: "Sql Server has been restarted",
|
||||
data: connect.data,
|
||||
status: connect.success ? 200 : 400,
|
||||
});
|
||||
});
|
||||
export default r;
|
||||
20
backend/gpSql/gpSqlStart.route.ts
Normal file
20
backend/gpSql/gpSqlStart.route.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { Router } from "express";
|
||||
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||
import { connectGPSql } from "./gpSqlConnection.controller.js";
|
||||
|
||||
const r = Router();
|
||||
|
||||
r.post("/start", async (_, res) => {
|
||||
const connect = await connectGPSql();
|
||||
apiReturn(res, {
|
||||
success: connect.success,
|
||||
level: connect.success ? "info" : "error",
|
||||
module: "routes",
|
||||
subModule: "prodSql",
|
||||
message: connect.message,
|
||||
data: connect.data,
|
||||
status: connect.success ? 200 : 400,
|
||||
});
|
||||
});
|
||||
|
||||
export default r;
|
||||
20
backend/gpSql/gpSqlStop.route.ts
Normal file
20
backend/gpSql/gpSqlStop.route.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { Router } from "express";
|
||||
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||
import { closePool } from "./gpSqlConnection.controller.js";
|
||||
|
||||
const r = Router();
|
||||
|
||||
r.post("/stop", async (_, res) => {
|
||||
const connect = await closePool();
|
||||
apiReturn(res, {
|
||||
success: connect.success,
|
||||
level: connect.success ? "info" : "error",
|
||||
module: "routes",
|
||||
subModule: "prodSql",
|
||||
message: connect.message,
|
||||
data: connect.data,
|
||||
status: connect.success ? 200 : 400,
|
||||
});
|
||||
});
|
||||
|
||||
export default r;
|
||||
39
backend/gpSql/queries/reqCheck.sql
Normal file
39
backend/gpSql/queries/reqCheck.sql
Normal file
@@ -0,0 +1,39 @@
|
||||
USE [ALPLA]
|
||||
|
||||
SELECT Distinct r.[POPRequisitionNumber] as req,
|
||||
r.[ApprovalStatus] as approvalStatus,
|
||||
r.[Requested By] requestedBy,
|
||||
format(t.[Created Date], 'yyyy-MM-dd') as createdAt,
|
||||
format(r.[Requisition Date], 'MM/dd/yyyy') as expectedDate,
|
||||
r.[Requisition Amount] as glAccount,
|
||||
case when r.[Account Segment 2] is null or r.[Account Segment 2] = '' then '999' else cast(r.[Account Segment 2] as varchar) end as plant
|
||||
,t.Status as status
|
||||
,t.[Document Status] as docStatus
|
||||
,t.[Workflow Status] as reqState
|
||||
,CASE
|
||||
WHEN [Workflow Status] = 'Completed'
|
||||
THEN 'Pending APO convertion'
|
||||
WHEN [Workflow Status] = 'Pending User Action'
|
||||
AND r.[ApprovalStatus] = 'Pending Approval'
|
||||
THEN 'Pending plant approver'
|
||||
WHEN [Workflow Status] = ''
|
||||
AND r.[ApprovalStatus] = 'Not Submitted'
|
||||
THEN 'Req not submited'
|
||||
ELSE 'Unknown reason'
|
||||
END AS approvedStatus
|
||||
|
||||
FROM [dbo].[PORequisitions] r (nolock)
|
||||
|
||||
|
||||
|
||||
left join
|
||||
[dbo].[PurchaseRequisitions] as t (nolock) on
|
||||
t.[Requisition Number] = r.[POPRequisitionNumber]
|
||||
|
||||
|
||||
--where ApprovalStatus = 'Pending Approval'
|
||||
--and [Account Segment 2] = 80
|
||||
|
||||
where r.POPRequisitionNumber in ([reqsToCheck])
|
||||
|
||||
Order By r.POPRequisitionNumber
|
||||
@@ -5,6 +5,7 @@ import { db } from "../db/db.controller.js";
|
||||
import { logs } from "../db/schema/logs.schema.js";
|
||||
import { emitToRoom } from "../socket.io/roomEmitter.socket.js";
|
||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||
import { notifySystemIssue } from "./logger.notify.js";
|
||||
//import build from "pino-abstract-transport";
|
||||
|
||||
export const logLevel = process.env.LOG_LEVEL || "info";
|
||||
@@ -45,6 +46,10 @@ const dbStream = new Writable({
|
||||
console.error(res.error);
|
||||
}
|
||||
|
||||
if (obj.notify) {
|
||||
notifySystemIssue(obj);
|
||||
}
|
||||
|
||||
if (obj.room) {
|
||||
emitToRoom(obj.room, res.data ? res.data[0] : obj);
|
||||
}
|
||||
|
||||
44
backend/logger/logger.notify.ts
Normal file
44
backend/logger/logger.notify.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
/**
|
||||
* For all logging that has notify set to true well send an email to the system admins, if we have a discord webhook set well send it there as well
|
||||
*/
|
||||
|
||||
import { eq } from "drizzle-orm";
|
||||
import { db } from "../db/db.controller.js";
|
||||
import { user } from "../db/schema/auth.schema.js";
|
||||
import { sendEmail } from "../utils/sendEmail.utils.js";
|
||||
|
||||
type NotifyData = {
|
||||
module: string;
|
||||
submodule: string;
|
||||
hostname: string;
|
||||
msg: string;
|
||||
stack: unknown[];
|
||||
};
|
||||
|
||||
export const notifySystemIssue = async (data: NotifyData) => {
|
||||
// build the email out
|
||||
|
||||
const formattedError = Array.isArray(data.stack)
|
||||
? data.stack.map((e: any) => e.error || e)
|
||||
: data.stack;
|
||||
|
||||
const sysAdmin = await db
|
||||
.select()
|
||||
.from(user)
|
||||
.where(eq(user.role, "systemAdmin"));
|
||||
|
||||
await sendEmail({
|
||||
email: sysAdmin.map((r) => r.email).join("; ") ?? "cowchmonkey@gmail.com", // change to pull in system admin emails
|
||||
subject: `${data.hostname} has encountered a critical issue.`,
|
||||
template: "serverCritialIssue",
|
||||
context: {
|
||||
plant: data.hostname,
|
||||
module: data.module,
|
||||
subModule: data.submodule,
|
||||
message: data.msg,
|
||||
error: JSON.stringify(formattedError, null, 2),
|
||||
},
|
||||
});
|
||||
|
||||
// TODO: add discord
|
||||
};
|
||||
96
backend/notification/notification.manualTrigger.ts
Normal file
96
backend/notification/notification.manualTrigger.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import { eq } from "drizzle-orm";
|
||||
import { type Response, Router } from "express";
|
||||
import { db } from "../db/db.controller.js";
|
||||
import { notifications } from "../db/schema/notifications.schema.js";
|
||||
import { auth } from "../utils/auth.utils.js";
|
||||
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||
|
||||
const r = Router();
|
||||
|
||||
r.post("/", async (req, res: Response) => {
|
||||
const hasPermissions = await auth.api.userHasPermission({
|
||||
body: {
|
||||
//userId: req?.user?.id,
|
||||
role: req.user?.roles as any,
|
||||
permissions: {
|
||||
notifications: ["readAll"], // This must match the structure in your access control
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!hasPermissions) {
|
||||
return apiReturn(res, {
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "notification",
|
||||
subModule: "post",
|
||||
message: `You do not have permissions to be here`,
|
||||
data: [],
|
||||
status: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const { data: nName, error: nError } = await tryCatch(
|
||||
db
|
||||
.select()
|
||||
.from(notifications)
|
||||
.where(eq(notifications.name, req.body.name)),
|
||||
);
|
||||
|
||||
if (nError) {
|
||||
return apiReturn(res, {
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "notification",
|
||||
subModule: "get",
|
||||
message: `There was an error getting the notifications `,
|
||||
data: [nError],
|
||||
status: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const { data: sub, error: sError } = await tryCatch(
|
||||
db
|
||||
.select()
|
||||
.from(notifications)
|
||||
.where(eq(notifications.name, req.body.name)),
|
||||
);
|
||||
|
||||
if (sError) {
|
||||
return apiReturn(res, {
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "notification",
|
||||
subModule: "get",
|
||||
message: `There was an error getting the subs `,
|
||||
data: [sError],
|
||||
status: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const emailString = [
|
||||
...new Set(
|
||||
sub.flatMap((e: any) =>
|
||||
e.emails?.map((email: any) => email.trim().toLowerCase()),
|
||||
),
|
||||
),
|
||||
].join(";");
|
||||
|
||||
console.log(emailString);
|
||||
const { default: runFun } = await import(
|
||||
`./notification.${req.body.name.trim()}.js`
|
||||
);
|
||||
const manual = await runFun(nName[0], "blake.matthes@alpla.com");
|
||||
|
||||
return apiReturn(res, {
|
||||
success: true,
|
||||
level: "info",
|
||||
module: "notification",
|
||||
subModule: "post",
|
||||
message: `Manual Trigger ran`,
|
||||
data: manual ?? [],
|
||||
status: 200,
|
||||
});
|
||||
});
|
||||
export default r;
|
||||
114
backend/notification/notification.qualityBlocking.ts
Normal file
114
backend/notification/notification.qualityBlocking.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import { eq } from "drizzle-orm";
|
||||
import { db } from "../db/db.controller.js";
|
||||
import { notifications } from "../db/schema/notifications.schema.js";
|
||||
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
|
||||
import {
|
||||
type SqlQuery,
|
||||
sqlQuerySelector,
|
||||
} from "../prodSql/prodSqlQuerySelector.utils.js";
|
||||
import { delay } from "../utils/delay.utils.js";
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||
import { sendEmail } from "../utils/sendEmail.utils.js";
|
||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||
import { v2QueryRun } from "../utils/pgConnectToLst.utils.js";
|
||||
|
||||
let shutoffv1 = false
|
||||
const func = async (data: any, emails: string) => {
|
||||
// TODO: remove this disable once all 17 plants are on this new lst
|
||||
if (!shutoffv1){
|
||||
v2QueryRun(`update public.notifications set active = false where name = '${data.name}'`)
|
||||
shutoffv1 = true
|
||||
}
|
||||
|
||||
|
||||
const { data: l, error: le } = (await tryCatch(
|
||||
db.select().from(notifications).where(eq(notifications.id, data.id)),
|
||||
)) as any;
|
||||
|
||||
if (le) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "notification",
|
||||
subModule: "query",
|
||||
message: `${data.name} encountered an error while trying to get initial info`,
|
||||
data: le as any,
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
|
||||
// search the query db for the query by name
|
||||
const sqlQuery = sqlQuerySelector(`${data.name}`) as SqlQuery;
|
||||
// create the ignore audit logs ids
|
||||
|
||||
// get get the latest blocking order id that was sent
|
||||
const blockingOrderId = l[0].options[0].lastBlockingOrderIdSent ?? 69;
|
||||
|
||||
// run the check
|
||||
const { data: queryRun, error } = await tryCatch(
|
||||
prodQuery(
|
||||
sqlQuery.query.replace("[lastBlocking]", blockingOrderId),
|
||||
`Running notification query: ${l[0].name}`,
|
||||
),
|
||||
);
|
||||
|
||||
if (error) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "notification",
|
||||
subModule: "query",
|
||||
message: `Data for: ${l[0].name} encountered an error while trying to get it`,
|
||||
data: error as any,
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
|
||||
if (queryRun.data.length > 0) {
|
||||
for (const bo of queryRun.data) {
|
||||
const sentEmail = await sendEmail({
|
||||
email: emails,
|
||||
subject: bo.subject,
|
||||
template: "qualityBlocking",
|
||||
context: {
|
||||
items: bo,
|
||||
},
|
||||
});
|
||||
|
||||
if (!sentEmail?.success) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "notification",
|
||||
subModule: "email",
|
||||
message: `${l[0].name} failed to send the email`,
|
||||
data: sentEmail?.data as any,
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
|
||||
await delay(1500);
|
||||
|
||||
const { error: dbe } = await tryCatch(
|
||||
db
|
||||
.update(notifications)
|
||||
.set({ options: [{ lastBlockingOrderIdSent: bo.blockingNumber }] })
|
||||
.where(eq(notifications.id, data.id)),
|
||||
);
|
||||
|
||||
if (dbe) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "notification",
|
||||
subModule: "query",
|
||||
message: `Data for: ${l[0].name} encountered an error while trying to get it`,
|
||||
data: dbe as any,
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export default func;
|
||||
@@ -9,9 +9,16 @@ import {
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||
import { sendEmail } from "../utils/sendEmail.utils.js";
|
||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||
import { v2QueryRun } from "../utils/pgConnectToLst.utils.js";
|
||||
|
||||
let shutoffv1 = false
|
||||
const func = async (data: any, emails: string) => {
|
||||
// TODO: remove this disable once all 17 plants are on this new lst
|
||||
if (!shutoffv1){
|
||||
v2QueryRun(`update public.notifications set active = false where name = '${data.name}'`)
|
||||
shutoffv1 = true
|
||||
}
|
||||
|
||||
const reprint = async (data: any, emails: string) => {
|
||||
// TODO: do the actual logic for the notification.
|
||||
const { data: l, error: le } = (await tryCatch(
|
||||
db.select().from(notifications).where(eq(notifications.id, data.id)),
|
||||
)) as any;
|
||||
@@ -23,7 +30,7 @@ const reprint = async (data: any, emails: string) => {
|
||||
module: "notification",
|
||||
subModule: "query",
|
||||
message: `${data.name} encountered an error while trying to get initial info`,
|
||||
data: [le],
|
||||
data: le as any,
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
@@ -52,7 +59,7 @@ const reprint = async (data: any, emails: string) => {
|
||||
module: "notification",
|
||||
subModule: "query",
|
||||
message: `Data for: ${l[0].name} encountered an error while trying to get it`,
|
||||
data: [error],
|
||||
data: error as any,
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
@@ -73,7 +80,7 @@ const reprint = async (data: any, emails: string) => {
|
||||
module: "notification",
|
||||
subModule: "query",
|
||||
message: `Data for: ${l[0].name} encountered an error while trying to get it`,
|
||||
data: [dbe],
|
||||
data: dbe as any,
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
@@ -93,14 +100,14 @@ const reprint = async (data: any, emails: string) => {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "email",
|
||||
subModule: "notification",
|
||||
module: "notification",
|
||||
subModule: "email",
|
||||
message: `${l[0].name} failed to send the email`,
|
||||
data: [sentEmail],
|
||||
data: sentEmail?.data as any,
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export default reprint;
|
||||
export default func;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import type { Express } from "express";
|
||||
import { requireAuth } from "../middleware/auth.middleware.js";
|
||||
import manual from "./notification.manualTrigger.js";
|
||||
import getNotifications from "./notification.route.js";
|
||||
import updateNote from "./notification.update.route.js";
|
||||
import deleteSub from "./notificationSub.delete.route.js";
|
||||
@@ -11,6 +12,7 @@ export const setupNotificationRoutes = (baseUrl: string, app: Express) => {
|
||||
//stats will be like this as we dont need to change this
|
||||
app.use(`${baseUrl}/api/notification`, requireAuth, getNotifications);
|
||||
app.use(`${baseUrl}/api/notification`, requireAuth, updateNote);
|
||||
app.use(`${baseUrl}/api/notification/manual`, requireAuth, manual);
|
||||
app.use(`${baseUrl}/api/notification/sub`, requireAuth, subs);
|
||||
app.use(`${baseUrl}/api/notification/sub`, requireAuth, newSub);
|
||||
app.use(`${baseUrl}/api/notification/sub`, requireAuth, updateSub);
|
||||
|
||||
@@ -22,7 +22,7 @@ const note: NewNotification[] = [
|
||||
"Checks for new blocking orders that have been entered, recommend to get the most recent order in here before activating.",
|
||||
active: false,
|
||||
interval: "10",
|
||||
options: [{ sentBlockingOrders: [{ timeStamp: "0", blockingOrder: 1 }] }],
|
||||
options: [{ lastBlockingOrderIdSent: 1 }],
|
||||
},
|
||||
{
|
||||
name: "alplaPurchaseHistory",
|
||||
|
||||
44
backend/prodSql/queries/qualityBlocking.sql
Normal file
44
backend/prodSql/queries/qualityBlocking.sql
Normal file
@@ -0,0 +1,44 @@
|
||||
use [test1_AlplaPROD2.0_Read]
|
||||
|
||||
SELECT
|
||||
'Alert! new blocking order: #' + cast(bo.HumanReadableId as varchar) + ' - ' + bo.ArticleVariantDescription as subject
|
||||
,cast(bo.[HumanReadableId] as varchar) as blockingNumber
|
||||
,bo.[ArticleVariantDescription] as article
|
||||
,cast(bo.[CustomerHumanReadableId] as varchar) + ' - ' + bo.[CustomerDescription] as customer
|
||||
,convert(varchar(10), bo.[BlockingDate], 101) + ' ' + convert(varchar(5), bo.[BlockingDate], 108) as blockingDate
|
||||
,cast(ArticleVariantHumanReadableId as varchar) + ' - ' + ArticleVariantDescription as av
|
||||
,case when bo.Remark = '' or bo.Remark is NULL then 'Please reach out to quality for the reason this was placed on hold as a remark was not entered during the blocking processs' else bo.Remark end as remark
|
||||
,cast(FORMAT(TotalAmountOfPieces, '###,###') as varchar) + ' / ' + cast(LoadingUnit as varchar) as peicesAndLoadingUnits
|
||||
,bo.ProductionLotHumanReadableId as lotNumber
|
||||
,cast(osd.IdBlockingDefectsGroup as varchar) + ' - ' + osd.Description as mainDefectGroup
|
||||
,cast(df.HumanReadableId as varchar) + ' - ' + os.Description as mainDefect
|
||||
,lot.MachineLocation as line
|
||||
--,*
|
||||
FROM [blocking].[BlockingOrder] (nolock) as bo
|
||||
|
||||
|
||||
/*** get the defect details ***/
|
||||
join
|
||||
[blocking].[BlockingDefect] (nolock) AS df
|
||||
on df.id = bo.MainDefectId
|
||||
|
||||
/*** pull description from 1.0 ***/
|
||||
left join
|
||||
[AlplaPROD_test1].[dbo].[T_BlockingDefects] (nolock) as os
|
||||
on os.IdGlobalBlockingDefect = df.HumanReadableId
|
||||
|
||||
/*** join in 1.0 defect group ***/
|
||||
left join
|
||||
[AlplaPROD_test1].[dbo].[T_BlockingDefectsGroups] (nolock) as osd
|
||||
on osd.IdBlockingDefectsGroup = os.IdBlockingDefectsGroup
|
||||
|
||||
left join
|
||||
[productionControlling].[ProducedLot] (nolock) as lot
|
||||
on lot.id = bo.ProductionLotId
|
||||
|
||||
|
||||
where
|
||||
bo.[BlockingDate] between getdate() - 2 and getdate() + 3 and
|
||||
bo.BlockingTrigger = 1 -- so we only get the ir blocking and not coa
|
||||
--and HumanReadableId NOT IN ([sentBlockingOrders])
|
||||
and bo.HumanReadableId > [lastBlocking]
|
||||
125
backend/purchase/puchase.gpCheck.ts
Normal file
125
backend/purchase/puchase.gpCheck.ts
Normal file
@@ -0,0 +1,125 @@
|
||||
import { gpQuery } from "../gpSql/gpSqlQuery.controller.js";
|
||||
import {
|
||||
type SqlGPQuery,
|
||||
sqlGpQuerySelector,
|
||||
} from "../gpSql/gpSqlQuerySelector.utils.js";
|
||||
import { createLogger } from "../logger/logger.controller.js";
|
||||
import type { GpStatus } from "../types/purhcaseTypes.js";
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||
|
||||
const log = createLogger({ module: "purchase", subModule: "gp" });
|
||||
|
||||
export const gpReqCheck = async (data: GpStatus[]) => {
|
||||
const gpReqCheck = sqlGpQuerySelector("reqCheck") as SqlGPQuery;
|
||||
const reqs = data.map((r) => r.req.trim());
|
||||
|
||||
if (!gpReqCheck.success) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "purchase",
|
||||
subModule: "query",
|
||||
message: `Error getting alpla purchase info`,
|
||||
data: gpReqCheck.message as any,
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
// check the initial req table
|
||||
const result = await gpQuery(
|
||||
gpReqCheck.query.replace(
|
||||
"[reqsToCheck]",
|
||||
data.map((r) => `'${r.req}'`).join(", ") ?? "xo",
|
||||
),
|
||||
"Get req info",
|
||||
);
|
||||
|
||||
log.debug(
|
||||
{},
|
||||
`There are ${result.data.length} reqs that need to be updated with there current status`,
|
||||
);
|
||||
|
||||
const firstFound = result.data.map((r) => ({
|
||||
req: r.req.trim(),
|
||||
approvedStatus: r.approvedStatus,
|
||||
}));
|
||||
|
||||
const firstFoundSet = new Set(result.data.map((r) => r.req.trim()));
|
||||
|
||||
const missing1Reqs = reqs.filter((req) => !firstFoundSet.has(req));
|
||||
|
||||
//check if we have a recall on our req
|
||||
const reqCheck = await gpQuery(
|
||||
`select
|
||||
[Requisition Number] as req
|
||||
,case when [Workflow Status] = 'recall' then 'returned' else [Workflow Status] end as approvedStatus
|
||||
--,*
|
||||
from [dbo].[PurchaseRequisitions] where [Requisition Number] in (${missing1Reqs.map((r) => `'${r}'`).join(", ") ?? "xo"})`,
|
||||
"validate req is not in recall",
|
||||
);
|
||||
|
||||
const secondFound = reqCheck.data.map((r) => ({
|
||||
req: r.req.trim(),
|
||||
approvedStatus: r.approvedStatus,
|
||||
}));
|
||||
|
||||
const secondFoundSet =
|
||||
new Set(reqCheck.data.map((r) => r.req.trim())) ?? [];
|
||||
|
||||
const missing2Reqs = missing1Reqs.filter((req) => !secondFoundSet.has(req));
|
||||
|
||||
// check if we have a po already
|
||||
const apoCheck = await gpQuery(
|
||||
`select
|
||||
SOPNUMBE
|
||||
,PONUMBER
|
||||
,reqStatus='converted'
|
||||
,*
|
||||
from alpla.dbo.sop60100 (nolock) where sopnumbe in (${missing2Reqs.map((r) => `'${r}'`).join(", ") ?? "xo"})`,
|
||||
"Get release info",
|
||||
);
|
||||
|
||||
const thirdRound = apoCheck.data.map((r) => ({
|
||||
req: r.req.trim(),
|
||||
approvedStatus: r.approvedStatus,
|
||||
}));
|
||||
|
||||
const missing3Reqs = missing2Reqs.filter((req) => !secondFoundSet.has(req));
|
||||
|
||||
// remaining just got canceled or no longer exist
|
||||
const remaining = missing3Reqs.map((m) => ({
|
||||
req: m,
|
||||
approvedStatus: "canceled",
|
||||
}));
|
||||
|
||||
const allFound = [
|
||||
...firstFound,
|
||||
...secondFound,
|
||||
...thirdRound,
|
||||
...remaining,
|
||||
];
|
||||
|
||||
const statusMap = new Map(
|
||||
allFound.map((r: any) => [r.req, r.approvedStatus]),
|
||||
);
|
||||
|
||||
const updateData = data.map((row) => ({
|
||||
id: row.id,
|
||||
//req: row.req,
|
||||
approvedStatus: statusMap.get(row.req.trim()) ?? null,
|
||||
}));
|
||||
|
||||
return updateData;
|
||||
} catch (error: any) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "purchase",
|
||||
subModule: "gpChecks",
|
||||
message: error.message,
|
||||
data: error.stack as any,
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
};
|
||||
@@ -2,7 +2,7 @@
|
||||
* This will monitor alpla purchase
|
||||
*/
|
||||
|
||||
import { eq } from "drizzle-orm";
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../db/db.controller.js";
|
||||
import {
|
||||
alplaPurchaseHistory,
|
||||
@@ -15,10 +15,12 @@ import {
|
||||
type SqlQuery,
|
||||
sqlQuerySelector,
|
||||
} from "../prodSql/prodSqlQuerySelector.utils.js";
|
||||
import type { GpStatus, StatusUpdate } from "../types/purhcaseTypes.js";
|
||||
import { createCronJob } from "../utils/croner.utils.js";
|
||||
import { delay } from "../utils/delay.utils.js";
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||
import { gpReqCheck } from "./puchase.gpCheck.js";
|
||||
|
||||
const log = createLogger({ module: "purchase", subModule: "purchaseMonitor" });
|
||||
|
||||
@@ -37,8 +39,8 @@ export const monitorAlplaPurchase = async () => {
|
||||
module: "purchase",
|
||||
subModule: "query",
|
||||
message: `Error getting alpla purchase info`,
|
||||
data: [sqlQuery.message],
|
||||
notify: false,
|
||||
data: sqlQuery.message as any,
|
||||
notify: true,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -76,7 +78,7 @@ export const monitorAlplaPurchase = async () => {
|
||||
|
||||
if (error) {
|
||||
log.error(
|
||||
{ error },
|
||||
{ error, notify: true },
|
||||
"There was an error adding alpla purchase history",
|
||||
);
|
||||
}
|
||||
@@ -84,14 +86,147 @@ export const monitorAlplaPurchase = async () => {
|
||||
await delay(500);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(
|
||||
{ error: e },
|
||||
log.error(
|
||||
{ error: e, notify: true },
|
||||
"Error occurred while running the monitor job",
|
||||
);
|
||||
log.error({ error: e }, "Error occurred while running the monitor job");
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// re-pull re-pull everything that has approvedStatus is pending
|
||||
|
||||
const { data: allReq, error: errorReq } = await tryCatch(
|
||||
db
|
||||
.select()
|
||||
.from(alplaPurchaseHistory)
|
||||
.where(eq(alplaPurchaseHistory.approvedStatus, "new")),
|
||||
);
|
||||
|
||||
// if theres no reqs just end meow
|
||||
if (errorReq) {
|
||||
log.error(
|
||||
{ stack: errorReq, notify: true },
|
||||
"There was an error getting history data",
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
log.debug({}, `There are ${allReq.length} pending reqs to be updated`);
|
||||
|
||||
if (!allReq.length) {
|
||||
log.debug({}, "There are not reqs to be processed");
|
||||
return;
|
||||
}
|
||||
/**
|
||||
* approvedStatus
|
||||
* remark = '' then pending req/manual po
|
||||
* pending = pending
|
||||
* approved = approved
|
||||
*
|
||||
*/
|
||||
|
||||
// the flow for all the fun stuff
|
||||
|
||||
const needsGpLookup: GpStatus[] = [];
|
||||
const updates: StatusUpdate[] = [];
|
||||
|
||||
for (const row of allReq ?? []) {
|
||||
const remark = row.remark?.toLowerCase() ?? "";
|
||||
|
||||
if (remark === "") {
|
||||
updates.push({ id: row.id, approvedStatus: "initial" });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (remark.includes("rct")) {
|
||||
updates.push({ id: row.id, approvedStatus: "received" });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (remark.includes("apo")) {
|
||||
updates.push({ id: row.id, approvedStatus: "approved" });
|
||||
continue;
|
||||
}
|
||||
|
||||
// not handled locally, defer to GP lookup
|
||||
needsGpLookup.push({ id: row.id, req: row.remark?.trim() ?? "" });
|
||||
}
|
||||
|
||||
const gpSmash = (await gpReqCheck(needsGpLookup)) as StatusUpdate[];
|
||||
|
||||
const merge = [...updates, ...gpSmash];
|
||||
|
||||
if (merge.length > 0) {
|
||||
await db.execute(sql`
|
||||
UPDATE ${alplaPurchaseHistory}
|
||||
SET approved_status = CASE
|
||||
${sql.join(
|
||||
merge.map(
|
||||
(row) =>
|
||||
sql`WHEN ${alplaPurchaseHistory.id} = ${row.id} THEN ${row.approvedStatus}`,
|
||||
),
|
||||
sql` `,
|
||||
)}
|
||||
ELSE approved_status
|
||||
END,
|
||||
updated_at = NOW()
|
||||
WHERE ${alplaPurchaseHistory.id} IN (
|
||||
${sql.join(
|
||||
merge.map((row) => sql`${row.id}`),
|
||||
sql`, `,
|
||||
)}
|
||||
)
|
||||
`);
|
||||
log.info(
|
||||
{},
|
||||
"All alpla purchase orders have been processed and updated",
|
||||
);
|
||||
}
|
||||
|
||||
// for reqs, create a string of reqs then run them through the gp req table to see there status. then update in lst ass see fit.
|
||||
|
||||
// then double check if we have all reqs covered, for the reqs missing from above restring them and check the po table
|
||||
|
||||
// these ones will be called to as converted to po
|
||||
|
||||
// for the remaining reqs from above check the actual req table to see the status of it if the workflow is set at Recall this means a change was requested from purchasing team and needs to be re approved
|
||||
|
||||
// for all remaining reqs we change them to replace/canceled
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// const updates = (allReq ?? [])
|
||||
// .map((row) => {
|
||||
// const remark = row.remark?.toLowerCase() ?? "";
|
||||
|
||||
// let approvedStatus: string | null = null;
|
||||
|
||||
// // priority order matters here
|
||||
// if (remark === "") {
|
||||
// approvedStatus = "initial";
|
||||
// } else if (remark.includes("rct")) {
|
||||
// approvedStatus = "received";
|
||||
// } else if (remark.includes("apo")) {
|
||||
// approvedStatus = "approved";
|
||||
// }
|
||||
|
||||
// // add your next 4 checks here
|
||||
// // else if (...) approvedStatus = "somethingElse";
|
||||
|
||||
// if (!approvedStatus) return null;
|
||||
|
||||
// return {
|
||||
// id: row.id,
|
||||
// approvedStatus,
|
||||
// };
|
||||
// })
|
||||
// .filter(
|
||||
// (
|
||||
// row,
|
||||
// ): row is {
|
||||
// id: string;
|
||||
// approvedStatus: string;
|
||||
// } => row !== null,
|
||||
// );
|
||||
|
||||
@@ -4,6 +4,7 @@ import { setupAuthRoutes } from "./auth/auth.routes.js";
|
||||
// import the routes and route setups
|
||||
import { setupApiDocsRoutes } from "./configs/scaler.config.js";
|
||||
import { setupDatamartRoutes } from "./datamart/datamart.routes.js";
|
||||
import { setupGPSqlRoutes } from "./gpSql/gpSql.routes.js";
|
||||
import { setupNotificationRoutes } from "./notification/notification.routes.js";
|
||||
import { setupOCPRoutes } from "./ocp/ocp.routes.js";
|
||||
import { setupOpendockRoutes } from "./opendock/opendock.routes.js";
|
||||
@@ -16,6 +17,7 @@ export const setupRoutes = (baseUrl: string, app: Express) => {
|
||||
setupSystemRoutes(baseUrl, app);
|
||||
setupApiDocsRoutes(baseUrl, app);
|
||||
setupProdSqlRoutes(baseUrl, app);
|
||||
setupGPSqlRoutes(baseUrl, app);
|
||||
setupDatamartRoutes(baseUrl, app);
|
||||
setupAuthRoutes(baseUrl, app);
|
||||
setupUtilsRoutes(baseUrl, app);
|
||||
|
||||
@@ -4,6 +4,7 @@ import createApp from "./app.js";
|
||||
import { db } from "./db/db.controller.js";
|
||||
import { dbCleanup } from "./db/dbCleanup.controller.js";
|
||||
import { type Setting, settings } from "./db/schema/settings.schema.js";
|
||||
import { connectGPSql } from "./gpSql/gpSqlConnection.controller.js";
|
||||
import { createLogger } from "./logger/logger.controller.js";
|
||||
import { startNotifications } from "./notification/notification.controller.js";
|
||||
import { createNotifications } from "./notification/notifications.master.js";
|
||||
@@ -14,6 +15,7 @@ import { monitorAlplaPurchase } from "./purchase/purchase.controller.js";
|
||||
import { setupSocketIORoutes } from "./socket.io/serverSetup.js";
|
||||
import { baseSettingValidationCheck } from "./system/settingsBase.controller.js";
|
||||
import { createCronJob } from "./utils/croner.utils.js";
|
||||
import { sendEmail } from "./utils/sendEmail.utils.js";
|
||||
|
||||
const port = Number(process.env.PORT) || 3000;
|
||||
export let systemSettings: Setting[] = [];
|
||||
@@ -28,6 +30,7 @@ const start = async () => {
|
||||
|
||||
// triggering long lived processes
|
||||
connectProdSql();
|
||||
connectGPSql();
|
||||
|
||||
// trigger startup processes these must run before anything else can run
|
||||
await baseSettingValidationCheck();
|
||||
@@ -60,6 +63,23 @@ const start = async () => {
|
||||
startNotifications();
|
||||
}, 5 * 1000);
|
||||
|
||||
process.on("uncaughtException", async (err) => {
|
||||
console.error("Uncaught Exception:", err);
|
||||
//await closePool();
|
||||
const emailData = {
|
||||
email: "blake.matthes@alpla.com", // should be moved to the db so it can be reused.
|
||||
subject: `${os.hostname()} has just encountered a crash.`,
|
||||
template: "serverCrash",
|
||||
context: {
|
||||
error: err,
|
||||
plant: `${os.hostname()}`,
|
||||
},
|
||||
};
|
||||
|
||||
await sendEmail(emailData);
|
||||
//process.exit(1);
|
||||
});
|
||||
|
||||
server.listen(port, async () => {
|
||||
log.info(
|
||||
`Listening on http://${os.hostname()}:${port}${baseUrl}, logging in ${process.env.LOG_LEVEL}, current ENV ${process.env.NODE_ENV ? process.env.NODE_ENV : "development"}`,
|
||||
|
||||
9
backend/types/purhcaseTypes.ts
Normal file
9
backend/types/purhcaseTypes.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
export type GpStatus = {
|
||||
id: string;
|
||||
req: string;
|
||||
};
|
||||
|
||||
export type StatusUpdate = {
|
||||
id: string;
|
||||
approvedStatus: string;
|
||||
};
|
||||
73
backend/utils/mailViews/qualityBlocking.hbs
Normal file
73
backend/utils/mailViews/qualityBlocking.hbs
Normal file
@@ -0,0 +1,73 @@
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
|
||||
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
|
||||
<style>
|
||||
.email-wrapper {
|
||||
max-width: 80%; /* Limit width to 80% of the window */
|
||||
margin: 0 auto; /* Center the content horizontally */
|
||||
}
|
||||
.email-table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
.email-table td {
|
||||
vertical-align: top;
|
||||
padding: 10px;
|
||||
border: 1px solid #000;
|
||||
border-radius: 25px; /* Rounded corners */
|
||||
background-color: #f0f0f0; /* Optional: Add a background color */
|
||||
}
|
||||
.email-table h2 {
|
||||
margin: 0;
|
||||
}
|
||||
.remarks {
|
||||
border: 1px solid black;
|
||||
padding: 10px;
|
||||
background-color: #f0f0f0;
|
||||
border-radius: 25px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="email-wrapper">
|
||||
<p>All,</p>
|
||||
<p>Please see the new blocking order that was created.</p>
|
||||
<div>
|
||||
<div class="email-table">
|
||||
<table>
|
||||
<tr>
|
||||
<td>
|
||||
<p><strong>Blocking number: </strong>{{items.blockingNumber}}</p>
|
||||
<p><strong>Blocking Date: </strong>{{items.blockingDate}}</p>
|
||||
<p><strong>Article: </strong>{{items.av}}</p>
|
||||
<p><strong>Production Lot: </strong>{{items.lotNumber}}</p>
|
||||
<p><strong>Line: </strong>{{items.line}}</p>
|
||||
</td>
|
||||
<td>
|
||||
<p><strong>Customer: </strong>{{items.customer}}</p>
|
||||
<p><strong>Blocked pieces /LUs: </strong>{{items.peicesAndLoadingUnits}}</p>
|
||||
<p><strong>Main defect group: </strong>{{items.mainDefectGroup}}</p>
|
||||
<p><strong>Main defect: </strong>{{items.mainDefect}}</p>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
<div class="remarks">
|
||||
<h4>Remarks:</h4>
|
||||
<p>{{items.remark}}</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<br>
|
||||
<p>For further questions please reach out to quality.</p>
|
||||
<p>Thank you,</p>
|
||||
<p>Quality Department</p>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
35
backend/utils/mailViews/serverCrash.hbs
Normal file
35
backend/utils/mailViews/serverCrash.hbs
Normal file
@@ -0,0 +1,35 @@
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
{{!--<title>Order Summary</title> --}}
|
||||
{{> styles}}
|
||||
<style>
|
||||
pre {
|
||||
background-color: #f8f9fa;
|
||||
color: #d63384;
|
||||
padding: 10px;
|
||||
border-radius: 5px;
|
||||
white-space: pre-wrap;
|
||||
font-family: monospace;
|
||||
}
|
||||
</style>
|
||||
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
|
||||
</head>
|
||||
<body>
|
||||
<h3>{{plant}},<br/> Has encountered an unexpected error.</h1>
|
||||
<p>
|
||||
Please see below the stack error from the crash.
|
||||
</p>
|
||||
<hr/>
|
||||
<div>
|
||||
<h3>Error Message: </h3>
|
||||
<p>{{error.message}}</p>
|
||||
</div>
|
||||
<hr/>
|
||||
<div>
|
||||
<h3>Stack trace</h3>
|
||||
<pre>{{{error.stack}}}</pre>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
36
backend/utils/mailViews/serverCritialIssue.hbs
Normal file
36
backend/utils/mailViews/serverCritialIssue.hbs
Normal file
@@ -0,0 +1,36 @@
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
{{!--<title>Order Summary</title> --}}
|
||||
{{> styles}}
|
||||
<style>
|
||||
pre {
|
||||
background-color: #f8f9fa;
|
||||
color: #d63384;
|
||||
padding: 10px;
|
||||
border-radius: 5px;
|
||||
white-space: pre-wrap;
|
||||
font-family: monospace;
|
||||
}
|
||||
</style>
|
||||
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
|
||||
</head>
|
||||
<body>
|
||||
<h3>{{plant}},<br/> Has encountered an error.</h1>
|
||||
<p>
|
||||
The below error came from Module: {{module}}, Submodule: {{submodule}}.
|
||||
</p>
|
||||
<p>The error below is considered to be critical and should be addressed</p>
|
||||
<hr/>
|
||||
<div>
|
||||
<h3>Error Message: </h3>
|
||||
<p>{{message}}</p>
|
||||
</div>
|
||||
<hr/>
|
||||
<div>
|
||||
<h3>Stack trace</h3>
|
||||
<pre>{{{error}}}</pre>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
41
backend/utils/pgConnectToLst.utils.ts
Normal file
41
backend/utils/pgConnectToLst.utils.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import pkg from "pg";
|
||||
const { Pool } = pkg;
|
||||
|
||||
const baseConfig = {
|
||||
host: process.env.DATABASE_HOST ?? "localhost",
|
||||
port: parseInt(process.env.DATABASE_PORT ?? "5433", 10),
|
||||
user: process.env.DATABASE_USER,
|
||||
password: process.env.DATABASE_PASSWORD,
|
||||
};
|
||||
|
||||
// Pools (one per DB)
|
||||
const v1Pool = new Pool({
|
||||
...baseConfig,
|
||||
database: "lst",
|
||||
});
|
||||
|
||||
const v2Pool = new Pool({
|
||||
...baseConfig,
|
||||
database: "lst_db",
|
||||
});
|
||||
|
||||
// Query helpers
|
||||
export const v1QueryRun = async (query: string, params?: any[]) => {
|
||||
try {
|
||||
const res = await v1Pool.query(query, params);
|
||||
return res;
|
||||
} catch (err) {
|
||||
console.error("V1 query error:", err);
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
export const v2QueryRun = async (query: string, params?: any[]) => {
|
||||
try {
|
||||
const res = await v2Pool.query(query, params);
|
||||
return res;
|
||||
} catch (err) {
|
||||
console.error("V2 query error:", err);
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
@@ -31,7 +31,10 @@ interface Data<T = unknown[]> {
|
||||
| "post"
|
||||
| "notification"
|
||||
| "delete"
|
||||
| "printing";
|
||||
| "printing"
|
||||
| "gpSql"
|
||||
| "email"
|
||||
| "gpChecks";
|
||||
level: "info" | "error" | "debug" | "fatal";
|
||||
message: string;
|
||||
room?: string;
|
||||
@@ -63,13 +66,14 @@ export const returnFunc = (data: Data) => {
|
||||
log.info({ notify: notify, room }, data.message);
|
||||
break;
|
||||
case "error":
|
||||
log.error({ notify: notify, error: data.data, room }, data.message);
|
||||
log.error({ notify: notify, stack: data.data ?? [], room }, data.message);
|
||||
|
||||
break;
|
||||
case "debug":
|
||||
log.debug({ notify: notify, room }, data.message);
|
||||
log.debug({ notify: notify, stack: data.data ?? [], room }, data.message);
|
||||
break;
|
||||
case "fatal":
|
||||
log.fatal({ notify: notify, room }, data.message);
|
||||
log.fatal({ notify: notify, stack: data.data ?? [], room }, data.message);
|
||||
}
|
||||
|
||||
// api section to return
|
||||
|
||||
@@ -88,7 +88,7 @@ export const sendEmail = async (data: EmailData) => {
|
||||
level: "error",
|
||||
module: "utils",
|
||||
subModule: "sendmail",
|
||||
message: `Error sending Email to : ${data.email}`,
|
||||
message: `Error sending Email to : ${data.email}, Error: ${error.message}`,
|
||||
data: [{ error: error }],
|
||||
notify: false,
|
||||
});
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
vars {
|
||||
url: http://localhost:3600/lst
|
||||
url: http://localhost:3000/lst
|
||||
readerIp: 10.44.14.215
|
||||
}
|
||||
vars:secret [
|
||||
|
||||
@@ -14,7 +14,7 @@ body:json {
|
||||
{
|
||||
"userId":"m6AbQXFwOXoX3YKLfwWgq2LIdDqS5jqv",
|
||||
"notificationId": "0399eb2a-39df-48b7-9f1c-d233cec94d2e",
|
||||
"emails": ["blake.mattes@alpla.com","cowchmonkey@gmail.com"]
|
||||
"emails": ["blake.matthes@alpla.com","blake.matthes@alpla.com"]
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
1486
frontend/package-lock.json
generated
1486
frontend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -26,6 +26,8 @@
|
||||
"radix-ui": "^1.4.3",
|
||||
"react": "^19.1.1",
|
||||
"react-dom": "^19.1.1",
|
||||
"react-markdown": "^10.1.0",
|
||||
"remark-gfm": "^4.0.1",
|
||||
"shadcn": "^4.0.8",
|
||||
"socket.io-client": "^4.8.3",
|
||||
"sonner": "^2.0.7",
|
||||
@@ -36,6 +38,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@tailwindcss/typography": "^0.5.19",
|
||||
"@tanstack/router-plugin": "^1.166.7",
|
||||
"@types/react": "^19.1.13",
|
||||
"@types/react-dom": "^19.1.9",
|
||||
|
||||
BIN
frontend/public/imgs/docs/notifications/dk_profile.png
Normal file
BIN
frontend/public/imgs/docs/notifications/dk_profile.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 5.8 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 27 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 20 KiB |
BIN
frontend/public/imgs/docs/notifications/lt_profile.png
Normal file
BIN
frontend/public/imgs/docs/notifications/lt_profile.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 5.9 KiB |
BIN
frontend/public/imgs/docs/notifications/lt_qualityBlocking.png
Normal file
BIN
frontend/public/imgs/docs/notifications/lt_qualityBlocking.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 31 KiB |
BIN
frontend/public/imgs/docs/notifications/lt_reprints.png
Normal file
BIN
frontend/public/imgs/docs/notifications/lt_reprints.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 21 KiB |
105
frontend/src/components/Sidebar/DocBar.tsx
Normal file
105
frontend/src/components/Sidebar/DocBar.tsx
Normal file
@@ -0,0 +1,105 @@
|
||||
import { Link, useRouterState } from "@tanstack/react-router";
|
||||
import { ChevronRight } from "lucide-react";
|
||||
import {
|
||||
Collapsible,
|
||||
CollapsibleContent,
|
||||
CollapsibleTrigger,
|
||||
} from "../ui/collapsible";
|
||||
|
||||
import {
|
||||
SidebarGroup,
|
||||
SidebarGroupContent,
|
||||
SidebarGroupLabel,
|
||||
SidebarMenu,
|
||||
SidebarMenuButton,
|
||||
SidebarMenuItem,
|
||||
SidebarMenuSub,
|
||||
SidebarMenuSubButton,
|
||||
SidebarMenuSubItem,
|
||||
useSidebar,
|
||||
} from "../ui/sidebar";
|
||||
|
||||
const docs = [
|
||||
{
|
||||
title: "Notifications",
|
||||
url: "/intro",
|
||||
//icon,
|
||||
isActive: window.location.pathname.includes("notifications") ?? false,
|
||||
items: [
|
||||
{
|
||||
title: "Reprints",
|
||||
url: "/reprints",
|
||||
},
|
||||
{
|
||||
title: "New Blocking order",
|
||||
url: "/qualityBlocking",
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
export default function DocBar() {
|
||||
const { setOpen } = useSidebar();
|
||||
const pathname = useRouterState({
|
||||
select: (s) => s.location.pathname,
|
||||
});
|
||||
|
||||
const isNotifications = pathname.includes("notifications");
|
||||
|
||||
return (
|
||||
<SidebarGroup>
|
||||
<SidebarGroupLabel>Docs</SidebarGroupLabel>
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
<SidebarMenuItem key={"docs"}>
|
||||
<SidebarMenuButton asChild>
|
||||
<Link to={"/docs"} onClick={() => setOpen(false)}>
|
||||
{/* <item.icon /> */}
|
||||
<span>{"Intro"}</span>
|
||||
</Link>
|
||||
</SidebarMenuButton>
|
||||
</SidebarMenuItem>
|
||||
</SidebarMenu>
|
||||
<SidebarMenu>
|
||||
{docs.map((item) => (
|
||||
<Collapsible
|
||||
key={item.title}
|
||||
asChild
|
||||
defaultOpen={isNotifications}
|
||||
className="group/collapsible"
|
||||
>
|
||||
<SidebarMenuItem>
|
||||
<CollapsibleTrigger asChild>
|
||||
<SidebarMenuButton tooltip={item.title}>
|
||||
<Link
|
||||
to={"/docs/$"}
|
||||
params={{ _splat: `notifications${item.url}` }}
|
||||
>
|
||||
{item.title}
|
||||
</Link>
|
||||
<ChevronRight className="ml-auto transition-transform duration-200 group-data-[state=open]/collapsible:rotate-90" />
|
||||
</SidebarMenuButton>
|
||||
</CollapsibleTrigger>
|
||||
<CollapsibleContent>
|
||||
<SidebarMenuSub>
|
||||
{item.items?.map((subItem) => (
|
||||
<SidebarMenuSubItem key={subItem.title}>
|
||||
<SidebarMenuSubButton asChild>
|
||||
<Link
|
||||
to={"/docs/$"}
|
||||
params={{ _splat: `notifications${subItem.url}` }}
|
||||
>
|
||||
{subItem.title}
|
||||
</Link>
|
||||
</SidebarMenuSubButton>
|
||||
</SidebarMenuSubItem>
|
||||
))}
|
||||
</SidebarMenuSub>
|
||||
</CollapsibleContent>
|
||||
</SidebarMenuItem>
|
||||
</Collapsible>
|
||||
))}
|
||||
</SidebarMenu>
|
||||
</SidebarGroupContent>
|
||||
</SidebarGroup>
|
||||
);
|
||||
}
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
} from "@/components/ui/sidebar";
|
||||
import { useSession } from "@/lib/auth-client";
|
||||
import AdminSidebar from "./AdminBar";
|
||||
import DocBar from "./DocBar";
|
||||
|
||||
export function AppSidebar() {
|
||||
const { data: session } = useSession();
|
||||
@@ -21,6 +22,7 @@ export function AppSidebar() {
|
||||
<SidebarMenu>
|
||||
<SidebarMenuItem>
|
||||
<SidebarContent>
|
||||
<DocBar/>
|
||||
{session &&
|
||||
(session.user.role === "admin" ||
|
||||
session.user.role === "systemAdmin") && (
|
||||
|
||||
76
frontend/src/components/ui/alert.tsx
Normal file
76
frontend/src/components/ui/alert.tsx
Normal file
@@ -0,0 +1,76 @@
|
||||
import * as React from "react"
|
||||
import { cva, type VariantProps } from "class-variance-authority"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
const alertVariants = cva(
|
||||
"group/alert relative grid w-full gap-0.5 rounded-lg border px-2.5 py-2 text-left text-sm has-data-[slot=alert-action]:relative has-data-[slot=alert-action]:pr-18 has-[>svg]:grid-cols-[auto_1fr] has-[>svg]:gap-x-2 *:[svg]:row-span-2 *:[svg]:translate-y-0.5 *:[svg]:text-current *:[svg:not([class*='size-'])]:size-4",
|
||||
{
|
||||
variants: {
|
||||
variant: {
|
||||
default: "bg-card text-card-foreground",
|
||||
destructive:
|
||||
"bg-card text-destructive *:data-[slot=alert-description]:text-destructive/90 *:[svg]:text-current",
|
||||
},
|
||||
},
|
||||
defaultVariants: {
|
||||
variant: "default",
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
function Alert({
|
||||
className,
|
||||
variant,
|
||||
...props
|
||||
}: React.ComponentProps<"div"> & VariantProps<typeof alertVariants>) {
|
||||
return (
|
||||
<div
|
||||
data-slot="alert"
|
||||
role="alert"
|
||||
className={cn(alertVariants({ variant }), className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function AlertTitle({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="alert-title"
|
||||
className={cn(
|
||||
"font-medium group-has-[>svg]/alert:col-start-2 [&_a]:underline [&_a]:underline-offset-3 [&_a]:hover:text-foreground",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function AlertDescription({
|
||||
className,
|
||||
...props
|
||||
}: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="alert-description"
|
||||
className={cn(
|
||||
"text-sm text-balance text-muted-foreground md:text-pretty [&_a]:underline [&_a]:underline-offset-3 [&_a]:hover:text-foreground [&_p:not(:last-child)]:mb-4",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function AlertAction({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="alert-action"
|
||||
className={cn("absolute top-2 right-2", className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export { Alert, AlertTitle, AlertDescription, AlertAction }
|
||||
31
frontend/src/components/ui/collapsible.tsx
Normal file
31
frontend/src/components/ui/collapsible.tsx
Normal file
@@ -0,0 +1,31 @@
|
||||
import { Collapsible as CollapsiblePrimitive } from "radix-ui"
|
||||
|
||||
function Collapsible({
|
||||
...props
|
||||
}: React.ComponentProps<typeof CollapsiblePrimitive.Root>) {
|
||||
return <CollapsiblePrimitive.Root data-slot="collapsible" {...props} />
|
||||
}
|
||||
|
||||
function CollapsibleTrigger({
|
||||
...props
|
||||
}: React.ComponentProps<typeof CollapsiblePrimitive.CollapsibleTrigger>) {
|
||||
return (
|
||||
<CollapsiblePrimitive.CollapsibleTrigger
|
||||
data-slot="collapsible-trigger"
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function CollapsibleContent({
|
||||
...props
|
||||
}: React.ComponentProps<typeof CollapsiblePrimitive.CollapsibleContent>) {
|
||||
return (
|
||||
<CollapsiblePrimitive.CollapsibleContent
|
||||
data-slot="collapsible-content"
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export { Collapsible, CollapsibleTrigger, CollapsibleContent }
|
||||
62
frontend/src/docs/notifications/intro.tsx
Normal file
62
frontend/src/docs/notifications/intro.tsx
Normal file
@@ -0,0 +1,62 @@
|
||||
export default function into() {
|
||||
return (
|
||||
<div className="mx-auto w-full max-w-4xl px-6 py-8">
|
||||
<h1 className="text-3xl underline p-2">Notifications</h1>
|
||||
|
||||
<p className="p-2">
|
||||
All notifications are a subscription based, please open the menu and
|
||||
select the notification you would like to know more info about
|
||||
</p>
|
||||
|
||||
<hr />
|
||||
<p>To subscribe to a notification</p>
|
||||
<ol className="list-decimal list-inside">
|
||||
<li>Click on your profile</li>
|
||||
|
||||
<img
|
||||
src="/lst/app/imgs/docs/notifications/lt_profile.png"
|
||||
alt="Reprint notification example"
|
||||
className="m-2 rounded-lg border-2"
|
||||
/>
|
||||
<li>Click account</li>
|
||||
<li>Select the notification you would like to subscribe to.</li>
|
||||
<img
|
||||
src="/lst/app/imgs/docs/notifications/lt_notification_select.png"
|
||||
alt="Reprint notification example"
|
||||
className="m-2 rounded-lg border-2"
|
||||
/>
|
||||
<li>
|
||||
If you want to have more people on the notification you can add more
|
||||
emails by clicking the add email button.{" "}
|
||||
<p className="text-sm underline">
|
||||
Please note that each user can subscribe on there own so you do not
|
||||
need to add others unless you want to add them.
|
||||
</p>
|
||||
</li>
|
||||
<li>When you are ready click subscribe</li>
|
||||
</ol>
|
||||
<br />
|
||||
<p className="">
|
||||
NOTE: you can select the same notification and add more people or just
|
||||
your self only, when you do this it will override you current
|
||||
subscription and add / remove the emails
|
||||
</p>
|
||||
<hr className="m-2" />
|
||||
<div>
|
||||
<p>
|
||||
The table at the bottom of your profile is where all of your current
|
||||
subscriptions will be at.
|
||||
</p>
|
||||
<p>
|
||||
Clicking the trash can will remove the notifications from sending you
|
||||
emails
|
||||
</p>
|
||||
<img
|
||||
src="/lst/app/imgs/docs/notifications/lt_notification_table.png"
|
||||
alt="Reprint notification example"
|
||||
className="m-2 rounded-lg border-2"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
19
frontend/src/docs/notifications/qualityBlocking.tsx
Normal file
19
frontend/src/docs/notifications/qualityBlocking.tsx
Normal file
@@ -0,0 +1,19 @@
|
||||
export default function reprints() {
|
||||
return (
|
||||
<div className="mx-auto w-full max-w-4xl px-6 py-8">
|
||||
<h1 className="text-3xl underline p-2">Quality Blocking</h1>
|
||||
|
||||
<p className="p-2">
|
||||
When a new blocking order is created a new alert will be sent out to all
|
||||
users subscribed. if there are multiple blocking orders created between
|
||||
checks you can expect to get multiple emails. below you will see an
|
||||
example of a blocking email that is sent out
|
||||
</p>
|
||||
<img
|
||||
src="/lst/app/imgs/docs/notifications/lt_qualityBlocking.png"
|
||||
alt="Reprint notification example"
|
||||
className="m-2 rounded-lg border-2"
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
18
frontend/src/docs/notifications/reprints.tsx
Normal file
18
frontend/src/docs/notifications/reprints.tsx
Normal file
@@ -0,0 +1,18 @@
|
||||
export default function reprints() {
|
||||
return (
|
||||
<div className="mx-auto w-full max-w-4xl px-6 py-8">
|
||||
<h1 className="text-3xl underline p-2">Reprints</h1>
|
||||
|
||||
<p className="p-2">
|
||||
The reprint alert will monitor for labels that have been printed within
|
||||
a defined time. when a label is printed in the defined time an email
|
||||
will sent out that looks similar to the below
|
||||
</p>
|
||||
<img
|
||||
src="/lst/app/imgs/docs/notifications/lt_reprints.png"
|
||||
alt="Reprint notification example"
|
||||
className="m-2 rounded-lg border-2"
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
26
frontend/src/lib/docs.ts
Normal file
26
frontend/src/lib/docs.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import type { ComponentType } from "react";
|
||||
|
||||
const modules = import.meta.glob("../docs/**/*.tsx", {
|
||||
eager: true,
|
||||
});
|
||||
|
||||
type DocModule = {
|
||||
default: ComponentType;
|
||||
};
|
||||
|
||||
const docsMap: Record<string, ComponentType> = {};
|
||||
|
||||
for (const path in modules) {
|
||||
const mod = modules[path] as DocModule;
|
||||
|
||||
const slug = path
|
||||
.replace("../docs/", "")
|
||||
.replace(".tsx", "");
|
||||
|
||||
// "notifications/intro"
|
||||
docsMap[slug] = mod.default;
|
||||
}
|
||||
|
||||
export function getDoc(slug: string) {
|
||||
return docsMap[slug];
|
||||
}
|
||||
@@ -11,6 +11,8 @@
|
||||
import { Route as rootRouteImport } from './routes/__root'
|
||||
import { Route as AboutRouteImport } from './routes/about'
|
||||
import { Route as IndexRouteImport } from './routes/index'
|
||||
import { Route as DocsIndexRouteImport } from './routes/docs/index'
|
||||
import { Route as DocsSplatRouteImport } from './routes/docs/$'
|
||||
import { Route as AdminSettingsRouteImport } from './routes/admin/settings'
|
||||
import { Route as AdminNotificationsRouteImport } from './routes/admin/notifications'
|
||||
import { Route as AdminLogsRouteImport } from './routes/admin/logs'
|
||||
@@ -29,6 +31,16 @@ const IndexRoute = IndexRouteImport.update({
|
||||
path: '/',
|
||||
getParentRoute: () => rootRouteImport,
|
||||
} as any)
|
||||
const DocsIndexRoute = DocsIndexRouteImport.update({
|
||||
id: '/docs/',
|
||||
path: '/docs/',
|
||||
getParentRoute: () => rootRouteImport,
|
||||
} as any)
|
||||
const DocsSplatRoute = DocsSplatRouteImport.update({
|
||||
id: '/docs/$',
|
||||
path: '/docs/$',
|
||||
getParentRoute: () => rootRouteImport,
|
||||
} as any)
|
||||
const AdminSettingsRoute = AdminSettingsRouteImport.update({
|
||||
id: '/admin/settings',
|
||||
path: '/admin/settings',
|
||||
@@ -72,6 +84,8 @@ export interface FileRoutesByFullPath {
|
||||
'/admin/logs': typeof AdminLogsRoute
|
||||
'/admin/notifications': typeof AdminNotificationsRoute
|
||||
'/admin/settings': typeof AdminSettingsRoute
|
||||
'/docs/$': typeof DocsSplatRoute
|
||||
'/docs/': typeof DocsIndexRoute
|
||||
'/user/profile': typeof authUserProfileRoute
|
||||
'/user/resetpassword': typeof authUserResetpasswordRoute
|
||||
'/user/signup': typeof authUserSignupRoute
|
||||
@@ -83,6 +97,8 @@ export interface FileRoutesByTo {
|
||||
'/admin/logs': typeof AdminLogsRoute
|
||||
'/admin/notifications': typeof AdminNotificationsRoute
|
||||
'/admin/settings': typeof AdminSettingsRoute
|
||||
'/docs/$': typeof DocsSplatRoute
|
||||
'/docs': typeof DocsIndexRoute
|
||||
'/user/profile': typeof authUserProfileRoute
|
||||
'/user/resetpassword': typeof authUserResetpasswordRoute
|
||||
'/user/signup': typeof authUserSignupRoute
|
||||
@@ -95,6 +111,8 @@ export interface FileRoutesById {
|
||||
'/admin/logs': typeof AdminLogsRoute
|
||||
'/admin/notifications': typeof AdminNotificationsRoute
|
||||
'/admin/settings': typeof AdminSettingsRoute
|
||||
'/docs/$': typeof DocsSplatRoute
|
||||
'/docs/': typeof DocsIndexRoute
|
||||
'/(auth)/user/profile': typeof authUserProfileRoute
|
||||
'/(auth)/user/resetpassword': typeof authUserResetpasswordRoute
|
||||
'/(auth)/user/signup': typeof authUserSignupRoute
|
||||
@@ -108,6 +126,8 @@ export interface FileRouteTypes {
|
||||
| '/admin/logs'
|
||||
| '/admin/notifications'
|
||||
| '/admin/settings'
|
||||
| '/docs/$'
|
||||
| '/docs/'
|
||||
| '/user/profile'
|
||||
| '/user/resetpassword'
|
||||
| '/user/signup'
|
||||
@@ -119,6 +139,8 @@ export interface FileRouteTypes {
|
||||
| '/admin/logs'
|
||||
| '/admin/notifications'
|
||||
| '/admin/settings'
|
||||
| '/docs/$'
|
||||
| '/docs'
|
||||
| '/user/profile'
|
||||
| '/user/resetpassword'
|
||||
| '/user/signup'
|
||||
@@ -130,6 +152,8 @@ export interface FileRouteTypes {
|
||||
| '/admin/logs'
|
||||
| '/admin/notifications'
|
||||
| '/admin/settings'
|
||||
| '/docs/$'
|
||||
| '/docs/'
|
||||
| '/(auth)/user/profile'
|
||||
| '/(auth)/user/resetpassword'
|
||||
| '/(auth)/user/signup'
|
||||
@@ -142,6 +166,8 @@ export interface RootRouteChildren {
|
||||
AdminLogsRoute: typeof AdminLogsRoute
|
||||
AdminNotificationsRoute: typeof AdminNotificationsRoute
|
||||
AdminSettingsRoute: typeof AdminSettingsRoute
|
||||
DocsSplatRoute: typeof DocsSplatRoute
|
||||
DocsIndexRoute: typeof DocsIndexRoute
|
||||
authUserProfileRoute: typeof authUserProfileRoute
|
||||
authUserResetpasswordRoute: typeof authUserResetpasswordRoute
|
||||
authUserSignupRoute: typeof authUserSignupRoute
|
||||
@@ -163,6 +189,20 @@ declare module '@tanstack/react-router' {
|
||||
preLoaderRoute: typeof IndexRouteImport
|
||||
parentRoute: typeof rootRouteImport
|
||||
}
|
||||
'/docs/': {
|
||||
id: '/docs/'
|
||||
path: '/docs'
|
||||
fullPath: '/docs/'
|
||||
preLoaderRoute: typeof DocsIndexRouteImport
|
||||
parentRoute: typeof rootRouteImport
|
||||
}
|
||||
'/docs/$': {
|
||||
id: '/docs/$'
|
||||
path: '/docs/$'
|
||||
fullPath: '/docs/$'
|
||||
preLoaderRoute: typeof DocsSplatRouteImport
|
||||
parentRoute: typeof rootRouteImport
|
||||
}
|
||||
'/admin/settings': {
|
||||
id: '/admin/settings'
|
||||
path: '/admin/settings'
|
||||
@@ -222,6 +262,8 @@ const rootRouteChildren: RootRouteChildren = {
|
||||
AdminLogsRoute: AdminLogsRoute,
|
||||
AdminNotificationsRoute: AdminNotificationsRoute,
|
||||
AdminSettingsRoute: AdminSettingsRoute,
|
||||
DocsSplatRoute: DocsSplatRoute,
|
||||
DocsIndexRoute: DocsIndexRoute,
|
||||
authUserProfileRoute: authUserProfileRoute,
|
||||
authUserResetpasswordRoute: authUserResetpasswordRoute,
|
||||
authUserSignupRoute: authUserSignupRoute,
|
||||
|
||||
31
frontend/src/routes/docs/$.tsx
Normal file
31
frontend/src/routes/docs/$.tsx
Normal file
@@ -0,0 +1,31 @@
|
||||
import { createFileRoute, Link } from "@tanstack/react-router";
|
||||
import { getDoc } from "../../lib/docs";
|
||||
|
||||
// Catch-all docs route (/docs/*); the splat portion of the URL is resolved
// to a doc slug inside RouteComponent.
export const Route = createFileRoute("/docs/$")({
  component: RouteComponent,
});
|
||||
|
||||
function RouteComponent() {
|
||||
const { _splat } = Route.useParams();
|
||||
const slug = _splat || "";
|
||||
|
||||
const Doc = getDoc(slug);
|
||||
|
||||
if (!Doc) {
|
||||
return (
|
||||
<div>
|
||||
<p>
|
||||
You Have reached a doc page that dose not seem to exist please
|
||||
validate and come back
|
||||
</p>
|
||||
<Link to="/docs">Docs Home</Link>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="mx-auto w-full max-w-4xl px-6 py-8">
|
||||
<Doc />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
100
frontend/src/routes/docs/index.tsx
Normal file
100
frontend/src/routes/docs/index.tsx
Normal file
@@ -0,0 +1,100 @@
|
||||
import { createFileRoute, Link } from "@tanstack/react-router";
|
||||
|
||||
// Docs index route (/docs): landing page listing what LST offers.
export const Route = createFileRoute("/docs/")({
  component: RouteComponent,
});
|
||||
|
||||
function RouteComponent() {
|
||||
return (
|
||||
<div className="mx-auto w-full max-w-4xl px-6 py-8">
|
||||
<h1 className="text-3xl underline p-2">Logistics Support Tool Intro</h1>
|
||||
<h2 className="text-2xl shadow-2xl p-2">What is lst</h2>
|
||||
<p className="p-2">
|
||||
Lst is a logistics support tool, and aid to ALPLAprod All data in here
|
||||
is just to be treated as an aid and can still be completed manually in
|
||||
alplaprod. These docs are here to help show what LST has to offer as
|
||||
well as the manual process via alpla prod.
|
||||
</p>
|
||||
<hr />
|
||||
<h2 className="text-2xl shadow-2xl p-2">What dose LST offer</h2>
|
||||
<ul className="list-disc list-inside">
|
||||
<li>One click print</li>
|
||||
<ul className="list-disc list-inside indent-8">
|
||||
<li>Controls printing of labels</li>
|
||||
<li>devices that can be used</li>
|
||||
<ul className="list-disc list-inside indent-16">
|
||||
<li>Printer control</li>
|
||||
<li>plc control</li>
|
||||
<li>ame palletizer control</li>
|
||||
</ul>
|
||||
<li>considers more business logic than alplaprod</li>
|
||||
<ul className="list-disc list-inside indent-16">
|
||||
<li>
|
||||
enough material is needed in the system to create the next pallet
|
||||
</li>
|
||||
<li>this will be the same for packaging as well.</li>
|
||||
</ul>
|
||||
<li>special processes</li>
|
||||
<ul className="list-disc list-inside indent-16">
|
||||
<li>in-house delivery triggered once booked in</li>
|
||||
<li>stop gap on printing labels at specific times</li>
|
||||
<li>per line delay in printing</li>
|
||||
</ul>
|
||||
</ul>
|
||||
<li>Silos Management</li>
|
||||
<ul className="list-disc list-inside indent-8">
|
||||
<li>Silo adjustments per location</li>
|
||||
<ul className="list-disc list-inside indent-16">
|
||||
<li>Charts for the last 10 adjustments</li>
|
||||
<li>Historical data</li>
|
||||
<li>Comments on per adjustment</li>
|
||||
<li>Automatic email for more than 5% deviation</li>
|
||||
</ul>
|
||||
<li>Attach silo</li>
|
||||
<ul className="list-disc list-inside indent-16">
|
||||
<li>Only shows machines not attached to this silo</li>
|
||||
</ul>
|
||||
<li>Detach silo</li>
|
||||
<ul className="list-disc list-inside indent-16">
|
||||
Only shows machines that are attached to the silo.
|
||||
</ul>
|
||||
</ul>
|
||||
<li>TMS integration</li>
|
||||
<ul className="list-disc list-inside indent-8">
|
||||
<li>integration with TI to auto add in orders</li>
|
||||
<ul className="list-disc list-inside indent-16">
|
||||
<li>orders are based on a time defined per plant.</li>
|
||||
<li>carriers can be auto set.</li>
|
||||
</ul>
|
||||
</ul>
|
||||
<li>
|
||||
<Link
|
||||
to={"/docs/$"}
|
||||
params={{ _splat: "notifications/intro" }}
|
||||
className="underline"
|
||||
>
|
||||
Notifications
|
||||
</Link>
|
||||
</li>
|
||||
<ul className="list-disc list-inside indent-8">
|
||||
<li>Automated alerts</li>
|
||||
<li>Subscription based</li>
|
||||
<li>Processes notifications</li>
|
||||
</ul>
|
||||
<li>Datamart</li>
|
||||
<ul className="list-disc list-inside indent-8">
|
||||
<li>queries that can be pulled via excel</li>
|
||||
<li>queries are created to allow better views for the plants</li>
|
||||
<li>Faster customer reports</li>
|
||||
</ul>
|
||||
<li>Fake EDI (Demand Management)</li>
|
||||
<ul className="list-disc list-inside indent-8">
|
||||
<li>Orders in (standard template)</li>
|
||||
<li>Customer specific orders templates per plant</li>
|
||||
<li>Forecast (standard Template)</li>
|
||||
<li>Customer specific forecast per plant</li>
|
||||
</ul>
|
||||
</ul>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -4,6 +4,7 @@ import { tanstackRouter } from "@tanstack/router-plugin/vite";
|
||||
import react from "@vitejs/plugin-react-swc";
|
||||
import { defineConfig } from "vite";
|
||||
|
||||
|
||||
// https://vite.dev/config/
|
||||
export default defineConfig({
|
||||
plugins: [
|
||||
|
||||
1
migrations/0025_talented_vector.sql
Normal file
1
migrations/0025_talented_vector.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "alpla_purchase_history" ADD COLUMN "updated_at" timestamp DEFAULT now();
|
||||
1
migrations/0026_vengeful_wiccan.sql
Normal file
1
migrations/0026_vengeful_wiccan.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "alpla_purchase_history" ALTER COLUMN "approved_status" SET DEFAULT 'new';
|
||||
1474
migrations/meta/0025_snapshot.json
Normal file
1474
migrations/meta/0025_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1474
migrations/meta/0026_snapshot.json
Normal file
1474
migrations/meta/0026_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -176,6 +176,20 @@
|
||||
"when": 1775661516749,
|
||||
"tag": "0024_absent_barracuda",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 25,
|
||||
"version": "7",
|
||||
"when": 1775755338816,
|
||||
"tag": "0025_talented_vector",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 26,
|
||||
"version": "7",
|
||||
"when": 1775786221817,
|
||||
"tag": "0026_vengeful_wiccan",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
23
package-lock.json
generated
23
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "lst_v3",
|
||||
"version": "0.0.1-alpha.2",
|
||||
"version": "0.0.1-alpha.3",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "lst_v3",
|
||||
"version": "0.0.1-alpha.2",
|
||||
"version": "0.0.1-alpha.3",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@dotenvx/dotenvx": "^1.57.0",
|
||||
@@ -24,6 +24,7 @@
|
||||
"drizzle-zod": "^0.8.3",
|
||||
"express": "^5.2.1",
|
||||
"husky": "^9.1.7",
|
||||
"ldapts": "^8.1.7",
|
||||
"morgan": "^1.10.1",
|
||||
"mssql": "^12.2.1",
|
||||
"multer": "^2.1.1",
|
||||
@@ -8063,6 +8064,18 @@
|
||||
"node": ">=20.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ldapts": {
|
||||
"version": "8.1.7",
|
||||
"resolved": "https://registry.npmjs.org/ldapts/-/ldapts-8.1.7.tgz",
|
||||
"integrity": "sha512-TJl6T92eIwMf/OJ0hDfKVa6ISwzo+lqCWCI5Mf//ARlKa3LKQZaSrme/H2rCRBhW0DZCQlrsV+fgoW5YHRNLUw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"strict-event-emitter-types": "2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
}
|
||||
},
|
||||
"node_modules/lines-and-columns": {
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
|
||||
@@ -10422,6 +10435,12 @@
|
||||
"node": ">=10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/strict-event-emitter-types": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strict-event-emitter-types/-/strict-event-emitter-types-2.0.0.tgz",
|
||||
"integrity": "sha512-Nk/brWYpD85WlOgzw5h173aci0Teyv8YdIAEtV+N88nDB0dLlazZyJMIsN6eo1/AR61l+p6CJTG1JIyFaoNEEA==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/string_decoder": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "lst_v3",
|
||||
"version": "0.0.1-alpha.2",
|
||||
"version": "0.0.1-alpha.3",
|
||||
"description": "The tool that supports us in our everyday alplaprod",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
@@ -10,10 +10,12 @@
|
||||
"dev:frontend": "cd frontend && npm run dev",
|
||||
"dev:db:migrate": "npx drizzle-kit push",
|
||||
"dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle.config.ts",
|
||||
"build": "rimraf dist && npm run dev:db:generate && npm run dev:db:migrate && npm run build:app && npm run build:copySql && cd frontend && npm run build",
|
||||
"build": "rimraf dist && npm run dev:db:generate && npm run dev:db:migrate && npm run build:app && npm run build:copySql && npm run build:copyGpSql && npm run build:emailTemplate && cd frontend && npm run build",
|
||||
"build:app": "tsc",
|
||||
"agent": "powershell -ExecutionPolicy Bypass -File scripts/agentController.ps1",
|
||||
"build:docker": "rimraf dist && npm run build:app && npm run build:copySql",
|
||||
"build:docker": "rimraf dist && npm run build:app && npm run build:copySql && npm run build:copyGpSql && npm run build:emailTemplate",
|
||||
"build:emailTemplate": "cpy \"backend/utils/mailViews/**/*\" dist/utils/mailViews --parents",
|
||||
"build:copyGpSql": "cpy \"backend/gpSql/queries/**/*\" dist/gpSql/queries --parents",
|
||||
"build:copySql": "cpy \"backend/prodSql/queries/**/*\" dist/prodSql/queries --parents",
|
||||
"lint": "tsc && biome lint",
|
||||
"start": "npm run start:server",
|
||||
@@ -75,6 +77,7 @@
|
||||
"drizzle-zod": "^0.8.3",
|
||||
"express": "^5.2.1",
|
||||
"husky": "^9.1.7",
|
||||
"ldapts": "^8.1.7",
|
||||
"morgan": "^1.10.1",
|
||||
"mssql": "^12.2.1",
|
||||
"multer": "^2.1.1",
|
||||
|
||||
@@ -27,21 +27,15 @@ $Servers = @(
|
||||
token = "usday1"
|
||||
loc = "D$\LST_V3"
|
||||
},
|
||||
[PSCustomObject]@{
|
||||
server = "usmcd1vms036"
|
||||
token = "test1"
|
||||
loc = "E$\LST_V3"
|
||||
},
|
||||
[PSCustomObject]@{
|
||||
server = "usiow1vms036"
|
||||
token = "test2"
|
||||
loc = "E$\LST_V3"
|
||||
}
|
||||
,
|
||||
[PSCustomObject]@{
|
||||
server = "usweb1vms006"
|
||||
token = "usweb1"
|
||||
loc = "D$\LST_V3"
|
||||
},
|
||||
[PSCustomObject]@{
|
||||
server = "usjci1vms006"
|
||||
token = "usjci1"
|
||||
loc = "D$\LST_V3"
|
||||
}
|
||||
#@{ server = "usbet1vms006"; token = "usbet1";loc = "C$\Users\adm_matthes01\Desktop\lst_backend"; }
|
||||
#@{ server = "usbow1vms006"; token = "usbow1"; loc = "C$\Users\adm_matthes01\Desktop\lst_backend" ; }
|
||||
@@ -86,9 +80,10 @@ function Show-Menu {
|
||||
Write-Host "==============================="
|
||||
Write-Host "1. Build app"
|
||||
Write-Host "2. Deploy New Release"
|
||||
Write-Host "3. Upgrade Node"
|
||||
Write-Host "4. Update Postgres"
|
||||
Write-Host "5. Exit"
|
||||
Write-Host "3. Deploy Test Servers"
|
||||
Write-Host "4. Upgrade Node"
|
||||
Write-Host "5. Update Postgres"
|
||||
Write-Host "6. Exit"
|
||||
Write-Host ""
|
||||
}
|
||||
|
||||
@@ -345,7 +340,7 @@ function Update-Server {
|
||||
Start-Sleep -Seconds 3
|
||||
Write-Host "Install/update completed."
|
||||
# do the migrations
|
||||
Push-Location $LocalPath
|
||||
# Push-Location $LocalPath
|
||||
Write-Host "Running migrations"
|
||||
npm run dev:db:migrate
|
||||
Start-Sleep -Seconds 3
|
||||
@@ -406,6 +401,45 @@ do {
|
||||
}
|
||||
}
|
||||
"3" {
|
||||
$TestServers = @(
|
||||
[PSCustomObject]@{
|
||||
server = "usmcd1vms036"
|
||||
token = "test1"
|
||||
loc = "E$\LST_V3"
|
||||
},
|
||||
[PSCustomObject]@{
|
||||
server = "usiow1vms036"
|
||||
token = "test2"
|
||||
loc = "E$\LST_V3"
|
||||
}
|
||||
)
|
||||
$testServer = Select-Server -List $TestServers
|
||||
|
||||
if($testServer -eq "all") {
|
||||
Write-Host "Updating all servers"
|
||||
for ($i = 0; $i -lt $TestServers.Count; $i++) {
|
||||
Write-Host "Updating $($TestServers[$i].server)"
|
||||
Update-Server -Server $TestServers[$i].server -Destination $TestServers[$i].loc -Token $TestServers[$i].token
|
||||
Start-Sleep -Seconds 1
|
||||
}
|
||||
Read-Host -Prompt "Press Enter to continue..."
|
||||
}
|
||||
|
||||
if ($testServer -ne "all") {
|
||||
Write-Host "You selected $($testServer.server)"
|
||||
# do the update to the server.
|
||||
# copy to the server
|
||||
Update-Server -Server $testServer.server -Destination $testServer.loc -Token $testServer.token
|
||||
# stop service
|
||||
# extract zip
|
||||
# run update check
|
||||
# run migration
|
||||
# start service backup
|
||||
|
||||
Read-Host -Prompt "Press Enter to continue..."
|
||||
}
|
||||
}
|
||||
"4" {
|
||||
Write-Host "Choose Server to upgrade node on"
|
||||
$server = Select-Server -List $Servers
|
||||
|
||||
@@ -430,7 +464,7 @@ do {
|
||||
Read-Host -Prompt "Press Enter to continue..."
|
||||
}
|
||||
}
|
||||
"4" {
|
||||
"5" {
|
||||
Write-Host "Choose Server to upgrade postgres on"
|
||||
$server = Select-Server -List $Servers
|
||||
|
||||
@@ -456,7 +490,7 @@ do {
|
||||
}
|
||||
|
||||
}
|
||||
"5" {
|
||||
"6" {
|
||||
Write-Host "Exiting..."
|
||||
exit
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user