Compare commits

...

6 Commits

44 changed files with 2937 additions and 827 deletions

View File

@@ -12,6 +12,9 @@ LOG_LEVEL=debug
# alplaprod tec apiKey
TEC_API_KEY=api key
# v1 listener
DATABASE_URL_V1=postgresql://ausername:password@localhost:5433/lst_db
# postgres connection
DATABASE_HOST=localhost
DATABASE_PORT=5432

View File

@@ -3,7 +3,9 @@
"workbench.colorTheme": "Default Dark+",
"terminal.integrated.env.windows": {},
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {"source.fixAll.biome": "explicit",
"source.organizeImports.biome": "explicit" },
"cSpell.words": ["alpla", "alplamart", "alplaprod", "ppoo"]
"editor.codeActionsOnSave": {
"source.fixAll.biome": "explicit",
"source.organizeImports.biome": "explicit"
},
"cSpell.words": ["alpla", "alplamart", "alplaprod", "intiallally", "ppoo"]
}

View File

@@ -4,8 +4,8 @@ meta {
seq: 3
}
post {
url: {{url}}/lst/api/admin/:userID/grant
patch {
url: {{url}}/lst/api/admin/:userID/revoke
body: json
auth: inherit
}
@@ -16,8 +16,8 @@ params:path {
body:json {
{
"module":"siloAdjustments",
"role":"viewer"
"module":"ocp"
}
}

View File

@@ -0,0 +1,8 @@
meta {
name: logistics
seq: 4
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,28 @@
meta {
name: Preprint
type: http
seq: 1
}
post {
url: {{url}}/lst/api/logistics/labeling/preprint
body: json
auth: inherit
}
body:json {
{
"scannerId": 999,
"lotNr": 26321,
"machineId": 3, // 457=22, 458=23
"printerId": 7, // 457=22, 458=23
"layoutId": 25,
"numberOfCopies": 0,
"qtyToPrint": 5
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: labeling
seq: 1
}
auth {
mode: inherit
}

View File

@@ -1,7 +1,7 @@
vars {
url: http://localhost:4200
url: https://usmcd1vms036.alpla.net
session_cookie:
urlv2: http://usiow1vms006:3001
urlv2: http://usmcd1vms036:3000
jwtV2:
}
vars:secret [

View File

@@ -1,26 +1,29 @@
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
import { toNodeHandler } from "better-auth/node";
import cors from "cors";
import express from "express";
import morgan from "morgan";
import { createServer } from "http";
import { setupRoutes } from "./src/internal/routerHandler/routeHandler.js";
import { printers } from "./src/internal/ocp/printers/printers.js";
import morgan from "morgan";
import os from "os";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import { schedulerManager } from "./src/internal/logistics/controller/schedulerManager.js";
import { printers } from "./src/internal/ocp/printers/printers.js";
import { setupRoutes } from "./src/internal/routerHandler/routeHandler.js";
import { baseSettings } from "./src/internal/system/controller/settings/baseSettings.js";
import { auth } from "./src/pkg/auth/auth.js";
import { db } from "./src/pkg/db/db.js";
import { settings } from "./src/pkg/db/schema/settings.js";
import { validateEnv } from "./src/pkg/utils/envValidator.js";
import { createLogger } from "./src/pkg/logger/logger.js";
import { returnFunc } from "./src/pkg/utils/return.js";
import { initializeProdPool } from "./src/pkg/prodSql/prodSqlConnect.js";
import { tryCatch } from "./src/pkg/utils/tryCatch.js";
import os from "os";
import cors from "cors";
import { sendNotify } from "./src/pkg/utils/notify.js";
import { toNodeHandler } from "better-auth/node";
import { auth } from "./src/pkg/auth/auth.js";
import { v1Listener } from "./src/pkg/logger/v1Listener.js";
import { apiHitMiddleware } from "./src/pkg/middleware/apiHits.js";
import { initializeProdPool } from "./src/pkg/prodSql/prodSqlConnect.js";
import { validateEnv } from "./src/pkg/utils/envValidator.js";
import { sendNotify } from "./src/pkg/utils/notify.js";
import { returnFunc } from "./src/pkg/utils/return.js";
import { tryCatch } from "./src/pkg/utils/tryCatch.js";
import { setupIoServer } from "./src/ws/server.js";
import { schedulerManager } from "./src/internal/logistics/controller/schedulerManager.js";
const main = async () => {
const env = validateEnv(process.env);
@@ -74,7 +77,7 @@ const main = async () => {
app.use(
basePath + "/test",
express.static(join(__dirname, "../controller"))
express.static(join(__dirname, "../controller")),
);
}
@@ -106,10 +109,7 @@ const main = async () => {
}
// Now this works for *.alpla.net
if (
hostname.endsWith(".alpla.net") ||
hostname === "alpla.net"
) {
if (hostname.endsWith(".alpla.net") || hostname === "alpla.net") {
return callback(null, true);
}
} catch (err) {
@@ -120,17 +120,14 @@ const main = async () => {
},
methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
credentials: true,
})
}),
);
// docs and api stuff
app.use(
basePath + "/d",
express.static(join(__dirname, "../lstDocs/build"))
);
app.use(basePath + "/d", express.static(join(__dirname, "../lstDocs/build")));
app.use(
basePath + "/app",
express.static(join(__dirname, "../frontend/dist"))
express.static(join(__dirname, "../frontend/dist")),
);
// server setup
@@ -142,18 +139,24 @@ const main = async () => {
// ws stuff
setupIoServer(server, basePath);
// sub systems
// start all systems after we are initially up and running
setTimeout(() => {
baseSettings();
printers();
schedulerManager();
// start up the v1listener
v1Listener();
}, 5 * 1000);
// start the server up
server.listen(PORT, "0.0.0.0", () =>
log.info(
{ stack: { name: "test" } },
`Server running in ${
process.env.NODE_ENV ? process.env.NODE_ENV : "dev"
}, on http://0.0.0.0:${PORT}${basePath}`
)
}, on http://0.0.0.0:${PORT}${basePath}`,
),
);
process.on("uncaughtException", async (err) => {
@@ -189,10 +192,8 @@ const main = async () => {
// const used = process.memoryUsage();
// console.log(
// `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
// used.rss /
// 1024 /
// 1024
// ).toFixed(2)} MB`
// used.rss / 1024 / 1024
// ).toFixed(2)} MB`,
// );
// }, 10000);
};

View File

@@ -0,0 +1,22 @@
/**
* This is intended for when running as dev so we can always keep the servers in sync with the main server.
* in the event the server has a change on it we want to make sure we stay in sync
*/
import { createLogger } from "../../../../pkg/logger/logger.js";
/**
 * Dev-only helper: when running in non-production with MAIN_SERVER set,
 * pull the server records from the main server so local data stays in sync.
 * Currently only logs intent; the actual pull is not implemented yet.
 */
export const mainServerSync = async () => {
  const log = createLogger({ module: "admin", subModule: "main server sync" });
  const isDev = process.env.NODE_ENV?.trim() !== "production";
  if (isDev && process.env.MAIN_SERVER) {
    log.info(
      {},
      "Running in dev and have a main server set we will now pull the servers and look for any changes",
    );
    return;
  }
  // Bug fix: the old else-branch claimed "running in production" even when
  // we were in dev but simply had no MAIN_SERVER configured.
  log.info(
    {},
    isDev
      ? "No MAIN_SERVER configured; skipping main server sync"
      : "This server is running in production no sync will happen",
  );
};

View File

@@ -1,6 +1,6 @@
import type { Express, Request, Response } from "express";
import { requireAuth } from "../../pkg/middleware/authMiddleware.js";
import { mainServerSync } from "./controller/servers/matchServers.js";
//admin routes
import users from "./routes/getUserRoles.js";
import grantRoles from "./routes/grantRole.js";
@@ -30,4 +30,9 @@ export const setupAdminRoutes = (app: Express, basePath: string) => {
requireAuth("user", ["systemAdmin", "admin"]), // will pass bc system admin but this is just telling us we need this
revokeRoles,
);
// run the sync only on startup
setTimeout(() => {
mainServerSync();
}, 5 * 1000);
};

View File

@@ -1,15 +1,15 @@
import { Router } from "express";
import axios from "axios";
import { type DrizzleError, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import {
insertServerDataSchema,
serverData,
} from "../../../../pkg/db/schema/servers.js";
import { db } from "../../../../pkg/db/db.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import type { DrizzleError } from "drizzle-orm";
import axios from "axios";
import { createLogger } from "../../../../pkg/logger/logger.js";
import https from "https";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();
@@ -26,12 +26,18 @@ router.post("/", async (req: Request, res: Response) => {
const { data, error } = await tryCatch(
db
.insert(serverData)
.values(parsed.data)
.values({
...parsed.data,
add_user: req.user?.username,
add_date: sql`NOW()`,
upd_user: req.user?.username,
upd_date: sql`NOW()`,
})
//.onConflictDoNothing()
.returning({
name: serverData.name,
plantToken: serverData.plantToken,
})
}),
);
if (error) {
@@ -58,7 +64,7 @@ router.post("/", async (req: Request, res: Response) => {
},
{
headers: { "Content-Type": "application/json" },
}
},
)) as any;
const setCookie = loginRes.headers["set-cookie"][0];
@@ -76,19 +82,19 @@ router.post("/", async (req: Request, res: Response) => {
Cookie: setCookie.split(";")[0],
},
withCredentials: true,
}
)
},
),
);
if (error) {
log.error(
{ stack: error },
"There was an error adding the server to Main Server"
"There was an error adding the server to Main Server",
);
}
log.info(
{ stack: data?.data },
"A new Server was just added to the server."
"A new Server was just added to the server.",
);
}

View File

@@ -1,12 +1,12 @@
import { Router } from "express";
import axios from "axios";
import { eq, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { eq } from "drizzle-orm";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import axios from "axios";
import { createLogger } from "../../../../pkg/logger/logger.js";
import https from "https";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();
@@ -66,6 +66,10 @@ router.patch("/:token", async (req: Request, res: Response) => {
if (req.body?.active !== undefined) {
updates.active = req.body.active;
}
updates.upd_user = req.user!.username || "lst_user";
updates.upd_date = sql`NOW()`;
try {
if (Object.keys(updates).length > 0) {
await db
@@ -90,7 +94,7 @@ router.patch("/:token", async (req: Request, res: Response) => {
},
{
headers: { "Content-Type": "application/json" },
}
},
)) as any;
const setCookie = loginRes?.headers["set-cookie"][0];
@@ -98,9 +102,7 @@ router.patch("/:token", async (req: Request, res: Response) => {
//console.log(setCookie.split(";")[0].replace("__Secure-", ""));
if (!setCookie) {
throw new Error(
"Did not receive a Set-Cookie header from login"
);
throw new Error("Did not receive a Set-Cookie header from login");
}
const { data, error } = await tryCatch(
@@ -113,20 +115,20 @@ router.patch("/:token", async (req: Request, res: Response) => {
Cookie: setCookie.split(";")[0],
},
withCredentials: true,
}
)
},
),
);
if (error) {
console.log(error);
log.error(
{ stack: error },
"There was an error adding the server to Main Server"
"There was an error adding the server to Main Server",
);
}
log.info(
{ stack: data?.data },
"A new Server was just added to the server."
"A new Server was just added to the server.",
);
}
res.status(200).json({ message: `${token} Server was just updated` });

View File

@@ -0,0 +1,133 @@
/**
* we want to be able to preprint labels from finished lots.
* we will need a lot number
* machine
* printer
* qty will come over as one by default
* copies will come over as 0 by default
* layout
*/
import { createLogger } from "../../../../pkg/logger/logger.js";
import { delay } from "../../../../pkg/utils/delay.js";
import { prodEndpoint } from "../../../../pkg/utils/prodEndpoint.js";
import type { returnFunc } from "../../../../pkg/utils/return.js";
export type Preprint = {
scannerId: number;
lotNr: number;
machineId: number;
printerId: number;
layoutId: number;
numberOfCopies: number;
qtyToPrint: number;
};
/**
 * Preprint labels for finished lots by repeatedly calling the TEC
 * GenerateAndPrintLabel endpoint.
 *
 * Prints `qtyToPrint` labels (always at least one, matching the previous
 * behavior when qtyToPrint <= 1), collecting the serial portion of each
 * returned SSCC. Stops and reports failure on the first print/endpoint
 * error.
 *
 * @param preprint request details: scanner, lot, machine, printer, layout,
 *   copies, and quantity to print.
 * @returns {success, message, data?} — data is the list of printed serials
 *   on success, or the raw endpoint payload on transport failure.
 */
export const preprintLabels = async (preprint: Preprint) => {
  const log = createLogger({
    module: "logistics",
    subModule: "preprint",
  });
  // Always print at least one label, even if qtyToPrint is 0 or negative
  // (same as the original single-print path).
  const total = preprint.qtyToPrint > 1 ? preprint.qtyToPrint : 1;
  const labelsPrinted: number[] = [];
  for (let printed = 0; printed < total; printed++) {
    const labels = await prodEndpoint(
      "POST",
      "/public/v1.0/Warehousing/GenerateAndPrintLabel",
      {
        scannerId: preprint.scannerId,
        lotNr: preprint.lotNr,
        machineId: preprint.machineId, // 457=22, 458=23
        printerId: preprint.printerId, // 457=22, 458=23
        layoutId: preprint.layoutId,
        numberOfCopies: preprint.numberOfCopies,
      },
    );
    // TEC reports print failures via Result === 1 plus a Message.
    if (labels?.data?.Result === 1) {
      log.error(
        {},
        `There was an error printing the label: ${labels.data.Message}`,
      );
      return {
        success: false,
        message: `${labels.data.Message}`,
      };
    }
    // Transport/endpoint failure (prodEndpoint did not report success).
    if (!labels?.success) {
      log.error(
        { error: labels?.data },
        `There was an error printing the label`,
      );
      return {
        success: false,
        message: `${labels?.message}`,
        data: labels?.data,
      };
    }
    // The serial portion of the SSCC sits between char 10 and the check
    // digit — TODO confirm against the label layout spec.
    const serial = parseInt(labels.data.SSCC.slice(10, -1));
    labelsPrinted.push(serial);
    // Bug fix: the remaining count was computed before the counter was
    // incremented, overstating the remaining labels by one.
    log.info(
      {},
      `Label just created ${serial} and printed, remaining to print ${total - printed - 1}`,
    );
    // Small pause between prints so the printer/endpoint is not flooded;
    // no need to sleep after the final label.
    if (printed + 1 < total) {
      await delay(250);
    }
  }
  return {
    success: true,
    message: `${preprint.qtyToPrint} were just printed`,
    data: labelsPrinted,
  };
};

View File

@@ -50,7 +50,7 @@ export const schedulerManager = async () => {
//console.log(data);
if (orderData.length === 0) {
log.info({}, "There are no new orders or incoming to be updated");
log.debug({}, "There are no new orders or incoming to be updated");
return;
}

View File

@@ -1,16 +1,17 @@
import type { Express, Request, Response } from "express";
import { requireAuth } from "../../pkg/middleware/authMiddleware.js";
import labeling from "./routes/labeling/labelingRoutes.js";
import schedule from "./routes/scheduler/scheduleRoutes.js";
export const setupLogisticsRoutes = (app: Express, basePath: string) => {
app.use(basePath + "/api/logistics/schedule", schedule);
app.use(basePath + "/api/logistics/labeling", labeling);
app.use(
basePath + "/api/admin/users",
requireAuth("user", ["systemAdmin"]) // will pass bc system admin but this is just telling us we need this
);
app.use(
basePath + "/api/admin",
requireAuth("user", ["systemAdmin", "admin"]) // will pass bc system admin but this is just telling us we need this
);
// app.use(
// basePath + "/api/admin/users",
// requireAuth("user", ["systemAdmin"]), // will pass bc system admin but this is just telling us we need this
// );
// app.use(
// basePath + "/api/admin",
// requireAuth("user", ["systemAdmin", "admin"]), // will pass bc system admin but this is just telling us we need this
// );
};

View File

@@ -0,0 +1,8 @@
import { Router } from "express";
import preprint from "./perprint.js";
const router = Router();
router.use("/", preprint);
export default router;

View File

@@ -0,0 +1,27 @@
import type { Request, Response } from "express";
import { Router } from "express";
import z from "zod";
import { preprintLabels } from "../../controller/labeling/preprint.js";
// Request schema for a preprint request; qtyToPrint defaults to 1.
export const Preprint = z.object({
  scannerId: z.number(),
  lotNr: z.number(),
  machineId: z.number(), // 457=22, 458=23
  printerId: z.number(), // 457=22, 458=23
  layoutId: z.number(),
  numberOfCopies: z.number(),
  qtyToPrint: z.number().default(1),
});
const router = Router();
/**
 * POST /preprint — validate the body and kick off label preprinting.
 * Responds 400 when the body fails validation.
 */
router.post("/preprint", async (req: Request, res: Response) => {
  const parsed = Preprint.safeParse(req.body);
  // Bug fix: the parse result was previously ignored and the raw body
  // forwarded, so validation and the qtyToPrint default never applied.
  if (!parsed.success) {
    return res.status(400).json({
      success: false,
      message: "Invalid request body",
      error: parsed.error.flatten(),
    });
  }
  const print = await preprintLabels(parsed.data);
  res
    .status(200)
    .json({ success: print.success, message: print.message, data: print.data });
});
export default router;

View File

@@ -0,0 +1,37 @@
/**
 * all the base settings live here so we don't have to remember to add every new setting; these will be the defaults
*/
import { readFileSync } from "fs";
import path from "path";
import { fileURLToPath } from "url";
import { db } from "../../../../pkg/db/db.js";
import { settings } from "../../../../pkg/db/schema/settings.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

/**
 * Seed the settings table with the defaults shipped in settings.json.
 * Existing rows are left untouched (onConflictDoNothing), so this is safe
 * to run on every startup.
 */
export const baseSettings = async () => {
  const log = createLogger({ module: "system", subModule: "base settings" });
  const settingsPath = path.resolve(__dirname, "./settings.json");
  const newSettings = JSON.parse(readFileSync(settingsPath, "utf-8"));
  // drizzle's .values() throws when called with an empty array — skip the
  // insert entirely when there is nothing to seed.
  if (!Array.isArray(newSettings) || newSettings.length === 0) {
    log.info({}, "No base settings found to seed");
    return;
  }
  const { data, error } = await tryCatch(
    db
      .insert(settings)
      .values(newSettings)
      .onConflictDoNothing()
      .returning({ name: settings.name }),
  );
  if (error) {
    log.error({ error }, "There was an error adding new settings");
  }
  if (data) {
    log.info({ newSettingsAdded: data }, "New settings added");
  }
};

View File

@@ -0,0 +1,16 @@
[
{
"name": "plantToken",
"value": "test3",
"description": "The plant token for the plant IE: test3 or usday1",
"moduleName": "system",
"roles": ["systemAdmin"]
},
{
"name": "dbServer",
"value": "usmcd1vms036",
"description": "What is the db server",
"moduleName": "system",
"roles": ["systemAdmin"]
}
]

View File

@@ -1,6 +1,12 @@
import type { Express, Request, Response } from "express";
import settings from "./routes/settings/settingRoutes.js";
import stats from "./routes/stats.js";
export const setupSystemRoutes = (app: Express, basePath: string) => {
app.use(basePath + "/api/system/stats", stats);
app.use(
basePath + "/api/system/settings", // will pass bc system admin but this is just telling us we need this
settings,
);
};

View File

@@ -0,0 +1,35 @@
import { Router } from "express";
import type { Request, Response } from "express";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { and, asc, eq } from "drizzle-orm";
const router = Router();

/**
 * GET / — list active servers ordered by name, optionally filtered by
 * ?token=<plantToken>. Only active servers are ever returned.
 */
router.get("/", async (req: Request, res: Response) => {
  const token = req.query.token;
  const conditions = [];
  if (token !== undefined) {
    conditions.push(eq(serverData.plantToken, `${token}`));
  }
  conditions.push(eq(serverData.active, true));
  const { data, error } = await tryCatch(
    db
      .select()
      .from(serverData)
      .where(and(...conditions))
      .orderBy(asc(serverData.name)),
  );
  if (error) {
    // Bug fix: a query failure is a server-side problem — report 500, not
    // 400 (the request itself was fine).
    return res.status(500).json({ error: error });
  }
  res.status(200).json({ message: "Current Active server", data: data });
});
export default router;

View File

@@ -0,0 +1,11 @@
import { Router } from "express";
import { requireAuth } from "../../../../pkg/middleware/authMiddleware.js";
import getSettings from "./getSettings.js";
import updateSetting from "./updateSetting.js";
const router = Router();
router.use("/", getSettings);
router.use("/", requireAuth("system", ["systemAdmin", "admin"]), updateSetting);
export default router;

View File

@@ -0,0 +1,141 @@
import axios from "axios";
import { eq, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();

// Body fields a client is allowed to change on a server record.
const UPDATABLE_FIELDS = [
  "name",
  "serverDNS",
  "ipAddress",
  "greatPlainsPlantCode",
  "lstServerPort",
  "serverLoc",
  "streetAddress",
  "cityState",
  "zipcode",
  "contactEmail",
  "contactPhone",
  "customerTiAcc",
  "active",
] as const;

/**
 * PATCH /:token — update a server record identified by its plant token.
 *
 * Only whitelisted body fields are applied; upd_user/upd_date are always
 * stamped. When running on localhost in dev with MAIN_SERVER set, the same
 * update is replayed against the main server so the two stay in sync.
 */
router.patch("/:token", async (req: Request, res: Response) => {
  const log = createLogger({ module: "admin", subModule: "update server" });
  // when a server is updated and is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there, we want to insert with update on conflict.
  const token = req.params.token;
  const updates: Record<string, any> = {};
  // Copy only the whitelisted fields that were actually supplied.
  for (const field of UPDATABLE_FIELDS) {
    if (req.body?.[field] !== undefined) {
      updates[field] = req.body[field];
    }
  }
  // Audit stamp for every update.
  updates.upd_user = req.user!.username || "lst_user";
  updates.upd_date = sql`NOW()`;
  try {
    if (Object.keys(updates).length > 0) {
      await db
        .update(serverData)
        .set(updates)
        .where(eq(serverData.plantToken, token));
    }
    if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
      log.info({}, "Running in dev; replaying the update to the main server");
      const axiosInstance = axios.create({
        // NOTE(review): TLS verification disabled — confirm this is only
        // ever used against internal self-signed certs.
        httpsAgent: new https.Agent({ rejectUnauthorized: false }),
        baseURL: process.env.MAIN_SERVER,
        withCredentials: true,
      });
      const loginRes = (await axiosInstance.post(
        `${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
        {
          username: process.env.MAIN_SERVER_USERNAME,
          password: process.env.MAIN_SERVER_PASSWORD,
        },
        {
          headers: { "Content-Type": "application/json" },
        },
      )) as any;
      // Bug fix: the unguarded ["set-cookie"][0] index threw a TypeError
      // before the !setCookie check could ever run when no cookie came back.
      const setCookie = loginRes?.headers?.["set-cookie"]?.[0];
      if (!setCookie) {
        throw new Error("Did not receive a Set-Cookie header from login");
      }
      const { data, error } = await tryCatch(
        axios.patch(
          `${process.env.MAIN_SERVER}/lst/api/admin/server/${token}`,
          updates,
          {
            headers: {
              "Content-Type": "application/json",
              Cookie: setCookie.split(";")[0],
            },
            withCredentials: true,
          },
        ),
      );
      if (error) {
        log.error(
          { stack: error },
          "There was an error replaying the update to the Main Server",
        );
      } else {
        // Bug fix: the success log previously fired even when the replay
        // errored, and described the update as "a new Server ... added".
        log.info(
          { stack: data?.data },
          "The server update was just replayed to the main server.",
        );
      }
    }
    res.status(200).json({ message: `${token} Server was just updated` });
  } catch (error) {
    log.error({ stack: error }, "Error updating server");
    res.status(400).json({ message: "Error updating server", error });
  }
});
export default router;

View File

@@ -1,12 +1,12 @@
import { format } from "date-fns-tz";
import { eq } from "drizzle-orm";
import { Router } from "express";
import { tryCatch } from "../../../pkg/utils/tryCatch.js";
import { db } from "../../../pkg/db/db.js";
import {
serverStats,
type ServerStats,
serverStats,
} from "../../../pkg/db/schema/serverstats.js";
import { eq } from "drizzle-orm";
import { format } from "date-fns-tz";
import { tryCatch } from "../../../pkg/utils/tryCatch.js";
import { checkBuildUpdate } from "../utlis/checkForBuild.js";
const router = Router();
@@ -14,7 +14,7 @@ const router = Router();
// GET /health
router.get("/", async (req, res) => {
const { data, error } = await tryCatch(
db.select().from(serverStats).where(eq(serverStats.id, "serverStats"))
db.select().from(serverStats).where(eq(serverStats.id, "serverStats")),
);
if (error || !data) {
@@ -22,6 +22,7 @@ router.get("/", async (req, res) => {
}
const statData = data as ServerStats[];
const used = process.memoryUsage();
res.json({
status: "ok",
uptime: process.uptime(),
@@ -30,6 +31,9 @@ router.get("/", async (req, res) => {
lastUpdate: statData[0]?.lastUpdate
? format(statData[0].lastUpdate, "MM/dd/yyyy HH:mm")
: "",
memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
used.rss / 1024 / 1024
).toFixed(2)} MB`,
});
});

View File

@@ -0,0 +1,27 @@
import {
jsonb,
pgTable,
text,
timestamp,
uniqueIndex,
uuid,
} from "drizzle-orm/pg-core";
// Drizzle schema for the "prodPermissions" table: named permissions that map
// to role lists (current and legacy) for production endpoints.
export const prodPermissions = pgTable(
  "prodPermissions",
  {
    prodPerm_id: uuid("prodPerm_id").defaultRandom().primaryKey(),
    // Unique permission name (enforced by the prodPermName index below).
    name: text("name").notNull(),
    description: text("description").notNull(),
    roles: jsonb("roles").default([]),
    rolesLegacy: jsonb("rolesLegacy").default([]),
    // NOTE(review): column-name casing is inconsistent — "add_User"/"add_Date"
    // vs "upd_User"/"upd_date", and the field upd_user maps to column
    // "upd_User". Renaming is a DB migration; confirm intent before changing.
    add_User: text("add_User").default("LST_System").notNull(),
    add_Date: timestamp("add_Date").defaultNow(),
    upd_user: text("upd_User").default("LST_System").notNull(),
    upd_date: timestamp("upd_date").defaultNow(),
  },
  (table) => [
    // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
    uniqueIndex("prodPermName").on(table.name),
  ],
);

View File

@@ -30,12 +30,16 @@ export const serverData = pgTable(
serverLoc: text("serverLoc").notNull(),
lastUpdated: timestamp("lastUpdated").defaultNow(),
isUpgrading: boolean("isUpgrading").default(false),
add_user: text("add_user").default("lst_user"),
add_date: timestamp("add_date").defaultNow(),
upd_user: text("upd_user").default("lst_user"),
upd_date: timestamp("upd_date").defaultNow(),
},
(table) => [
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
uniqueIndex("plantToken").on(table.plantToken),
]
],
);
export const selectServerDataSchema = createSelectSchema(serverData);

View File

@@ -0,0 +1,70 @@
import { Client } from "pg";
import { createLogger } from "./logger.js";
// Shape of the JSON payload the v1 app publishes on logs_channel.
type NewLog = {
  level: string;
  username: string;
  service: string;
  message: string;
  checked: boolean;
  add_Date: Date;
};

/**
 * Bridge from the legacy (v1) app: LISTENs on its postgres NOTIFY channels
 * and re-emits log events through the structured logger.
 */
export const v1Listener = async () => {
  const log = createLogger({ module: "logger", subModule: "Old logging app" });
  const client = new Client({
    connectionString: process.env.DATABASE_URL_V1,
  });
  // Robustness: without an 'error' listener, a dropped connection emits an
  // unhandled 'error' event and crashes the whole process.
  client.on("error", (err) => {
    log.error({ stack: err }, "v1 listener connection error");
  });
  await client.connect();
  // the notify channels to listen for events on (hardcoded, so safe to
  // interpolate into the LISTEN statement)
  const channels = ["logs_channel", "users_channel", "orders_channel"];
  for (const ch of channels) {
    await client.query(`LISTEN ${ch}`);
  }
  log.info({}, `Listening for: ${channels.join(", ")}`);
  // Mirror a v1 log record through the structured logger at the same level.
  const logEvent = (newLog: string) => {
    let newLogEvent: NewLog;
    try {
      newLogEvent = JSON.parse(newLog);
    } catch (err) {
      // Robustness: a malformed payload previously threw inside the
      // notification handler; log and drop it instead.
      log.error({ stack: err, payload: newLog }, "Unparseable v1 log payload");
      return;
    }
    switch (newLogEvent.level) {
      case "error":
        log.error(
          { username: newLogEvent.username, service: newLogEvent.service },
          newLogEvent.message,
        );
        break;
      default:
        // "info" and any unknown level both land at info, as before.
        log.info(
          { username: newLogEvent.username, service: newLogEvent.service },
          newLogEvent.message,
        );
    }
  };
  client.on("notification", (msg) => {
    // msg.channel tells which channel it came from
    // msg.payload is whatever message you sent from the trigger
    switch (msg.channel) {
      case "logs_channel":
        logEvent(msg.payload || "");
        break;
      case "users_channel":
        console.log("👤 User event:", msg.payload);
        break;
      case "orders_channel":
        console.log("🛒 Order event:", msg.payload);
        break;
      default:
        console.log("Other event:", msg);
    }
  });
};

View File

@@ -1,8 +1,8 @@
import type { Request, Response, NextFunction } from "express";
import { auth } from "../auth/auth.js";
import { userRoles, type UserRole } from "../db/schema/user_roles.js";
import { db } from "../db/db.js";
import { eq } from "drizzle-orm";
import type { NextFunction, Request, Response } from "express";
import { auth } from "../auth/auth.js";
import { db } from "../db/db.js";
import { type UserRole, userRoles } from "../db/schema/user_roles.js";
declare global {
namespace Express {
@@ -11,6 +11,7 @@ declare global {
id: string;
email?: string;
roles: Record<string, string[]>;
username?: string | null;
};
}
}
@@ -61,6 +62,7 @@ export const requireAuth = (moduleName?: string, requiredRoles?: string[]) => {
id: userId,
email: session.user.email,
roles: rolesByModule,
username: session.user.username,
};
// SystemAdmin override
@@ -71,10 +73,10 @@ export const requireAuth = (moduleName?: string, requiredRoles?: string[]) => {
// Role check (skip if systemAdmin)
if (requiredRoles?.length && !hasSystemAdmin) {
const moduleRoles = moduleName
? rolesByModule[moduleName] ?? []
? (rolesByModule[moduleName] ?? [])
: Object.values(rolesByModule).flat();
const hasAccess = moduleRoles.some((role) =>
requiredRoles.includes(role)
requiredRoles.includes(role),
);
if (!hasAccess) {
return res.status(403).json({ error: "Forbidden" });

View File

@@ -0,0 +1,8 @@
import axios from "axios";
import https from "https";
// Shared axios client for talking to the main server (MAIN_SERVER env var).
// NOTE(review): rejectUnauthorized: false disables TLS certificate
// verification — acceptable only for internal/self-signed certs; confirm
// this is intentional before using against anything external.
export const axiosInstance = axios.create({
  httpsAgent: new https.Agent({ rejectUnauthorized: false }),
  baseURL: process.env.MAIN_SERVER,
  withCredentials: true,
});

View File

@@ -0,0 +1,3 @@
/** Promise-based sleep: resolves after `ms` milliseconds. */
export const delay = (ms: number) =>
  new Promise((resolve) => setTimeout(resolve, ms));

View File

@@ -0,0 +1,90 @@
/**
* This will fire the endpoints
* we only need the endpoint url grabbed from swagger
* and the data to be passed over
*/
import axios, { type Method } from "axios";
import { eq } from "drizzle-orm";
import https from "https";
import { db } from "../db/db.js";
import { settings } from "../db/schema/settings.js";
import { createLogger } from "../logger/logger.js";
import { tryCatch } from "./tryCatch.js";
// Known test-server plant tokens and the port each test instance runs on.
const testServers = [
  { token: "test1", port: 8940 },
  { token: "test2", port: 8941 },
  { token: "test3", port: 8942 },
];

/**
 * Fire a TEC prod endpoint.
 *
 * Builds the target URL from the plantToken / dbServer settings rows (test
 * tokens are routed to the matching test-server port) and issues the
 * request with the TEC API key.
 *
 * @param method HTTP method.
 * @param endpoint endpoint path as shown in swagger, e.g.
 *   "/public/v1.0/DemandManagement/ORDERS".
 * @param data optional request body.
 * @returns {success, message, data} describing the call result.
 */
export const prodEndpoint = async <T>(
  method: Method,
  endpoint: string,
  data?: T,
) => {
  const log = createLogger({ module: "pkg", subModule: "prodEndpoints" });
  // example url "https://usmcd1vms036.alpla.net:8942/application/public/v1.0/DemandManagement/ORDERS"
  // The two settings lookups are independent — run them in parallel.
  // (Each select returns an array since more than one row could match.)
  const [plantToken, server] = await Promise.all([
    db.select().from(settings).where(eq(settings.name, "plantToken")),
    db.select().from(settings).where(eq(settings.name, "dbServer")),
  ]);
  // Single find() replaces the previous some() + filter() double scan.
  const test = testServers.find((t) => t.token === plantToken[0]?.value);
  const url = test
    ? `https://${server[0]?.value}.alpla.net:${test.port}/application${endpoint}`
    : `https://${plantToken[0]?.value}prod.alpla.net/application${endpoint}`;
  // create the axios instance
  const axiosInstance = axios.create({
    httpsAgent: new https.Agent({ rejectUnauthorized: false }),
    withCredentials: true,
    headers: {
      "X-API-Key": process.env.TEC_API_KEY || "",
      "Content-Type": "application/json",
    },
  });
  const { data: api, error: apiError } = (await tryCatch(
    axiosInstance({
      method,
      url: url,
      ...(data && { data }),
    }),
  )) as any;
  if (apiError) {
    log.error(
      { error: apiError?.response?.data },
      "There was an error running the endpoint",
    );
    return {
      success: false,
      message: "There was an error processing the endpoint",
      // Bug fix: apiError.response is undefined for network-level failures;
      // the unguarded access threw while trying to report the error.
      data: apiError?.response?.data,
    };
  }
  if (api) {
    return {
      success: true,
      message: "Prod endpoint processed",
      data: api.data,
    };
  }
  // Previously this path fell through and returned undefined; callers check
  // result?.success, so an explicit failure object is strictly safer.
  return {
    success: false,
    message: "No response from endpoint",
    data: undefined,
  };
};

View File

@@ -1,7 +1,6 @@
import * as React from "react"
import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area"
import { cn } from "@/lib/utils"
import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area";
import * as React from "react";
import { cn } from "../../lib/utils";
function ScrollArea({
className,
@@ -23,7 +22,7 @@ function ScrollArea({
<ScrollBar />
<ScrollAreaPrimitive.Corner />
</ScrollAreaPrimitive.Root>
)
);
}
function ScrollBar({
@@ -41,7 +40,7 @@ function ScrollBar({
"h-full w-2.5 border-l border-l-transparent",
orientation === "horizontal" &&
"h-2.5 flex-col border-t border-t-transparent",
className
className,
)}
{...props}
>
@@ -50,7 +49,7 @@ function ScrollBar({
className="bg-border relative flex-1 rounded-full"
/>
</ScrollAreaPrimitive.ScrollAreaScrollbar>
)
);
}
export { ScrollArea, ScrollBar }
export { ScrollArea, ScrollBar };

View File

@@ -1,6 +1,5 @@
import * as React from "react"
import { cn } from "@/lib/utils"
import * as React from "react";
import { cn } from "../../lib/utils";
function Table({ className, ...props }: React.ComponentProps<"table">) {
return (
@@ -14,7 +13,7 @@ function Table({ className, ...props }: React.ComponentProps<"table">) {
{...props}
/>
</div>
)
);
}
function TableHeader({ className, ...props }: React.ComponentProps<"thead">) {
@@ -24,7 +23,7 @@ function TableHeader({ className, ...props }: React.ComponentProps<"thead">) {
className={cn("[&_tr]:border-b", className)}
{...props}
/>
)
);
}
function TableBody({ className, ...props }: React.ComponentProps<"tbody">) {
@@ -34,7 +33,7 @@ function TableBody({ className, ...props }: React.ComponentProps<"tbody">) {
className={cn("[&_tr:last-child]:border-0", className)}
{...props}
/>
)
);
}
function TableFooter({ className, ...props }: React.ComponentProps<"tfoot">) {
@@ -43,11 +42,11 @@ function TableFooter({ className, ...props }: React.ComponentProps<"tfoot">) {
data-slot="table-footer"
className={cn(
"bg-muted/50 border-t font-medium [&>tr]:last:border-b-0",
className
className,
)}
{...props}
/>
)
);
}
function TableRow({ className, ...props }: React.ComponentProps<"tr">) {
@@ -56,11 +55,11 @@ function TableRow({ className, ...props }: React.ComponentProps<"tr">) {
data-slot="table-row"
className={cn(
"hover:bg-muted/50 data-[state=selected]:bg-muted border-b transition-colors",
className
className,
)}
{...props}
/>
)
);
}
function TableHead({ className, ...props }: React.ComponentProps<"th">) {
@@ -69,11 +68,11 @@ function TableHead({ className, ...props }: React.ComponentProps<"th">) {
data-slot="table-head"
className={cn(
"text-foreground h-10 px-2 text-left align-middle font-medium whitespace-nowrap [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]",
className
className,
)}
{...props}
/>
)
);
}
function TableCell({ className, ...props }: React.ComponentProps<"td">) {
@@ -82,11 +81,11 @@ function TableCell({ className, ...props }: React.ComponentProps<"td">) {
data-slot="table-cell"
className={cn(
"p-2 align-middle whitespace-nowrap [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]",
className
className,
)}
{...props}
/>
)
);
}
function TableCaption({
@@ -99,7 +98,7 @@ function TableCaption({
className={cn("text-muted-foreground mt-4 text-sm", className)}
{...props}
/>
)
);
}
export {
@@ -111,4 +110,4 @@ export {
TableRow,
TableCell,
TableCaption,
}
};

View File

@@ -10,7 +10,7 @@ interface ShipmentItemProps {
export function ShipmentItem({
shipment,
index = 0,
perm = true,
//perm = true,
}: ShipmentItemProps) {
const { setNodeRef, listeners, attributes, transform } = useDraggable({
id: shipment.orderNumber,

View File

@@ -1,9 +1,7 @@
import { createFileRoute } from "@tanstack/react-router";
import { useEffect, useState } from "react";
import { coreSocket } from "../../../lib/socket.io/socket";
import "../-components/style.css";
import moment from "moment";
import Timeline from "react-calendar-timeline";
export const Route = createFileRoute("/(logistics)/logistics/deliverySchedule")(
{
@@ -19,9 +17,9 @@ export const Route = createFileRoute("/(logistics)/logistics/deliverySchedule")(
function RouteComponent() {
// connect to the channel
const [shipments, setShipments] = useState([]) as any;
//const [shipments, setShipments] = useState([]) as any;
//const [perm] = useState(true); // will check this for sure with a user permissions
const [loaded, setLoaded] = useState(false);
//const [loaded, setLoaded] = useState(false);
// useEffect(() => {
// const handleConnect = () => {

View File

@@ -11,7 +11,7 @@ import { useEffect } from "react";
import { Toaster } from "sonner";
import Nav from "../components/navBar/Nav";
import SideBarNav from "../components/navBar/SideBarNav";
import { SidebarProvider, SidebarTrigger } from "../components/ui/sidebar";
import { SidebarProvider } from "../components/ui/sidebar";
import { userAccess } from "../lib/authClient";
import { SessionGuard } from "../lib/providers/SessionProvider";
import { ThemeProvider } from "../lib/providers/theme-provider";

View File

@@ -91,6 +91,10 @@ function RouteComponent() {
);
},
}),
// TODO: password reset — will use the email flow
// TODO: change password — needs an inline input, so we'll need inline editing for this column
// TODO: trash-can icon to delete a user
// TODO: last login — still needs the server side implemented as well
columnHelper.accessor("roles", {
header: () => <span>Roles</span>,
cell: ({ row }) => {

View File

@@ -0,0 +1,12 @@
-- Trigger function: publishes each newly inserted "logs" row as a JSON
-- payload on the 'logs_channel' NOTIFY channel, so LISTEN-ing clients
-- (presumably the v1 listener service configured via DATABASE_URL_V1 —
-- confirm) receive new log entries in real time.
CREATE OR REPLACE FUNCTION notify_new_log()
RETURNS trigger AS $$
BEGIN
  -- row_to_json(NEW)::text serializes the full inserted row; NOTE: NOTIFY
  -- payloads are capped at 8000 bytes by default, so very wide rows could fail.
  PERFORM pg_notify('logs_channel', row_to_json(NEW)::text);
  -- AFTER trigger: the return value is ignored, but returning NEW is conventional.
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Fire once per inserted row (FOR EACH ROW), after the insert commits its
-- row-level work, so every new log entry produces exactly one notification.
CREATE TRIGGER logs_notify_trigger
AFTER INSERT ON logs
FOR EACH ROW
EXECUTE FUNCTION notify_new_log();

View File

@@ -9,7 +9,7 @@
"dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts",
"dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts",
"build": "npm run build:server && npm run build:frontend",
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y",
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y ",
"build:frontend": "cd frontend && npm run build",
"build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild",
"copy:scripts": "tsx server/scripts/copyScripts.ts",

View File

@@ -0,0 +1,17 @@
-- Migration: permission registry for prod modules plus audit columns on serverData.
-- NOTE: the "--> statement-breakpoint" markers are drizzle-kit delimiters and
-- must not be altered.

-- Each row names one permission and the role lists (current and legacy)
-- granted to it, stored as JSON arrays. Audit columns default to the
-- LST_System user and the insertion timestamp.
CREATE TABLE "prodPermissions" (
	"prodPerm_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
	"name" text NOT NULL,
	"description" text NOT NULL,
	-- roles currently allowed this permission (presumably role name strings — confirm)
	"roles" jsonb DEFAULT '[]'::jsonb,
	-- roles carried over from the legacy system
	"rolesLegacy" jsonb DEFAULT '[]'::jsonb,
	"add_User" text DEFAULT 'LST_System' NOT NULL,
	"add_Date" timestamp DEFAULT now(),
	"upd_User" text DEFAULT 'LST_System' NOT NULL,
	"upd_date" timestamp DEFAULT now()
);
--> statement-breakpoint
-- Backfill audit columns onto the existing serverData table (default user
-- here is 'lst_user', unlike prodPermissions' 'LST_System').
ALTER TABLE "serverData" ADD COLUMN "add_user" text DEFAULT 'lst_user';--> statement-breakpoint
ALTER TABLE "serverData" ADD COLUMN "add_date" timestamp DEFAULT now();--> statement-breakpoint
ALTER TABLE "serverData" ADD COLUMN "upd_user" text DEFAULT 'lst_user';--> statement-breakpoint
ALTER TABLE "serverData" ADD COLUMN "upd_date" timestamp DEFAULT now();--> statement-breakpoint
-- Permission names must be unique (case-sensitive btree index on "name").
CREATE UNIQUE INDEX "prodPermName" ON "prodPermissions" USING btree ("name");

File diff suppressed because it is too large Load Diff

View File

@@ -134,6 +134,13 @@
"when": 1760480733009,
"tag": "0018_aspiring_silver_samurai",
"breakpoints": true
},
{
"idx": 19,
"version": "7",
"when": 1760623729227,
"tag": "0019_bizarre_tarot",
"breakpoints": true
}
]
}

13
package-lock.json generated
View File

@@ -42,6 +42,7 @@
"@types/node": "^24.7.1",
"@types/nodemailer": "^7.0.2",
"@types/nodemailer-express-handlebars": "^4.0.5",
"@types/pg": "^8.15.5",
"concurrently": "^9.2.1",
"cz-conventional-changelog": "^3.3.0",
"standard-version": "^9.5.0",
@@ -3384,6 +3385,18 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/pg": {
"version": "8.15.5",
"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.15.5.tgz",
"integrity": "sha512-LF7lF6zWEKxuT3/OR8wAZGzkg4ENGXFNyiV/JeOt9z5B+0ZVwbql9McqX5c/WStFq1GaGso7H1AzP/qSzmlCKQ==",
"devOptional": true,
"license": "MIT",
"dependencies": {
"@types/node": "*",
"pg-protocol": "*",
"pg-types": "^2.2.0"
}
},
"node_modules/@types/qs": {
"version": "6.14.0",
"dev": true,

View File

@@ -12,7 +12,7 @@
"dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts",
"dev": "concurrently -n \"server,frontend,docs\" -c \"#007755,#2f6da3,#DB4FE0\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\"",
"copy:docs": "node scripts/lstDocCopy.mjs",
"build:app": "rimraf dist && npx tsc",
"build:app": "rimraf dist && npx tsc && xcopy app\\src\\internal\\system\\controller\\settings\\settings.json dist\\src\\internal\\system\\controller\\settings /E /I /Y",
"build:front": "cd frontend && rimraf dist && npm run build",
"build:docs": "cd lstDocs && rimraf build && npm run build",
"build:wrapper": "cd lstWrapper && rimraf publish && dotnet publish -c Release -o ./publish",
@@ -75,6 +75,7 @@
"@types/node": "^24.7.1",
"@types/nodemailer": "^7.0.2",
"@types/nodemailer-express-handlebars": "^4.0.5",
"@types/pg": "^8.15.5",
"concurrently": "^9.2.1",
"cz-conventional-changelog": "^3.3.0",
"standard-version": "^9.5.0",