Compare commits

...

6 Commits

44 changed files with 2937 additions and 827 deletions

View File

@@ -12,6 +12,9 @@ LOG_LEVEL=debug
# alpaprod tec apiKey # alpaprod tec apiKey
TEC_API_KEY=api key TEC_API_KEY=api key
# v1 listener
DATABASE_URL_V1=postgresql://ausername:password@localhost:5433/lst_db
# postgres connection # postgres connection
DATABASE_HOST=localhost DATABASE_HOST=localhost
DATABASE_PORT=5432 DATABASE_PORT=5432

16
.vscode/settings.json vendored
View File

@@ -1,9 +1,11 @@
{ {
"editor.defaultFormatter": "biomejs.biome", "editor.defaultFormatter": "biomejs.biome",
"workbench.colorTheme": "Default Dark+", "workbench.colorTheme": "Default Dark+",
"terminal.integrated.env.windows": {}, "terminal.integrated.env.windows": {},
"editor.formatOnSave": true, "editor.formatOnSave": true,
"editor.codeActionsOnSave": {"source.fixAll.biome": "explicit", "editor.codeActionsOnSave": {
"source.organizeImports.biome": "explicit" }, "source.fixAll.biome": "explicit",
"cSpell.words": ["alpla", "alplamart", "alplaprod", "ppoo"] "source.organizeImports.biome": "explicit"
},
"cSpell.words": ["alpla", "alplamart", "alplaprod", "intiallally", "ppoo"]
} }

View File

@@ -4,8 +4,8 @@ meta {
seq: 3 seq: 3
} }
post { patch {
url: {{url}}/lst/api/admin/:userID/grant url: {{url}}/lst/api/admin/:userID/revoke
body: json body: json
auth: inherit auth: inherit
} }
@@ -16,8 +16,8 @@ params:path {
body:json { body:json {
{ {
"module":"siloAdjustments", "module":"ocp"
"role":"viewer"
} }
} }

View File

@@ -0,0 +1,8 @@
meta {
name: logistics
seq: 4
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,28 @@
meta {
name: Preprint
type: http
seq: 1
}
post {
url: {{url}}/lst/api/logistics/labeling/preprint
body: json
auth: inherit
}
body:json {
{
"scannerId": 999,
"lotNr": 26321,
"machineId": 3, // 457=22, 458=23
"printerId": 7, // 457=22, 458=23
"layoutId": 25,
"numberOfCopies": 0,
"qtyToPrint": 5
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: labeling
seq: 1
}
auth {
mode: inherit
}

View File

@@ -1,7 +1,7 @@
vars { vars {
url: http://localhost:4200 url: https://usmcd1vms036.alpla.net
session_cookie: session_cookie:
urlv2: http://usiow1vms006:3001 urlv2: http://usmcd1vms036:3000
jwtV2: jwtV2:
} }
vars:secret [ vars:secret [

View File

@@ -1,200 +1,201 @@
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0"; process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
import { toNodeHandler } from "better-auth/node";
import cors from "cors";
import express from "express"; import express from "express";
import morgan from "morgan";
import { createServer } from "http"; import { createServer } from "http";
import { setupRoutes } from "./src/internal/routerHandler/routeHandler.js"; import morgan from "morgan";
import { printers } from "./src/internal/ocp/printers/printers.js"; import os from "os";
import { dirname, join } from "path"; import { dirname, join } from "path";
import { fileURLToPath } from "url"; import { fileURLToPath } from "url";
import { schedulerManager } from "./src/internal/logistics/controller/schedulerManager.js";
import { printers } from "./src/internal/ocp/printers/printers.js";
import { setupRoutes } from "./src/internal/routerHandler/routeHandler.js";
import { baseSettings } from "./src/internal/system/controller/settings/baseSettings.js";
import { auth } from "./src/pkg/auth/auth.js";
import { db } from "./src/pkg/db/db.js"; import { db } from "./src/pkg/db/db.js";
import { settings } from "./src/pkg/db/schema/settings.js"; import { settings } from "./src/pkg/db/schema/settings.js";
import { validateEnv } from "./src/pkg/utils/envValidator.js";
import { createLogger } from "./src/pkg/logger/logger.js"; import { createLogger } from "./src/pkg/logger/logger.js";
import { returnFunc } from "./src/pkg/utils/return.js"; import { v1Listener } from "./src/pkg/logger/v1Listener.js";
import { initializeProdPool } from "./src/pkg/prodSql/prodSqlConnect.js";
import { tryCatch } from "./src/pkg/utils/tryCatch.js";
import os from "os";
import cors from "cors";
import { sendNotify } from "./src/pkg/utils/notify.js";
import { toNodeHandler } from "better-auth/node";
import { auth } from "./src/pkg/auth/auth.js";
import { apiHitMiddleware } from "./src/pkg/middleware/apiHits.js"; import { apiHitMiddleware } from "./src/pkg/middleware/apiHits.js";
import { initializeProdPool } from "./src/pkg/prodSql/prodSqlConnect.js";
import { validateEnv } from "./src/pkg/utils/envValidator.js";
import { sendNotify } from "./src/pkg/utils/notify.js";
import { returnFunc } from "./src/pkg/utils/return.js";
import { tryCatch } from "./src/pkg/utils/tryCatch.js";
import { setupIoServer } from "./src/ws/server.js"; import { setupIoServer } from "./src/ws/server.js";
import { schedulerManager } from "./src/internal/logistics/controller/schedulerManager.js";
const main = async () => { const main = async () => {
const env = validateEnv(process.env); const env = validateEnv(process.env);
const PORT = Number(env.VITE_PORT) || 4200; const PORT = Number(env.VITE_PORT) || 4200;
//create the logger //create the logger
const log = createLogger({ module: "system", subModule: "main start" }); const log = createLogger({ module: "system", subModule: "main start" });
// base path // base path
let basePath: string = ""; let basePath: string = "";
const __filename = fileURLToPath(import.meta.url); const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename); const __dirname = dirname(__filename);
// Db connection stuff // Db connection stuff
const res = await tryCatch(db.select().from(settings)); const res = await tryCatch(db.select().from(settings));
if (res.error) { if (res.error) {
return returnFunc({ return returnFunc({
success: false, success: false,
module: "system", module: "system",
level: "fatal", level: "fatal",
message: `Database lookup failed`, message: `Database lookup failed`,
notify: false, notify: false,
data: [], data: [],
}); });
} }
if (res.data.length === 0) { if (res.data.length === 0) {
//return //return
// returnFunc({ // returnFunc({
// success: false, // success: false,
// module: "system", // module: "system",
// level: "fatal", // level: "fatal",
// message: `This seems to be the first time you have started the app please validate the settings have been intiated`, // message: `This seems to be the first time you have started the app please validate the settings have been intiated`,
// notify: false, // notify: false,
// data: [], // data: [],
// }); // });
} }
// connect to the prod sql // connect to the prod sql
await initializeProdPool(); await initializeProdPool();
// express app // express app
const app = express(); const app = express();
// global env that run only in dev // global env that run only in dev
if (process.env.NODE_ENV?.trim() !== "production") { if (process.env.NODE_ENV?.trim() !== "production") {
app.use(morgan("tiny")); app.use(morgan("tiny"));
basePath = "/lst"; basePath = "/lst";
app.use( app.use(
basePath + "/test", basePath + "/test",
express.static(join(__dirname, "../controller")) express.static(join(__dirname, "../controller")),
); );
} }
// global middleware // global middleware
app.set("trust proxy", true); app.set("trust proxy", true);
app.use(apiHitMiddleware); app.use(apiHitMiddleware);
app.all(basePath + "/api/auth/*splat", toNodeHandler(auth)); // sign-in sign-out app.all(basePath + "/api/auth/*splat", toNodeHandler(auth)); // sign-in sign-out
app.use(express.json()); app.use(express.json());
const allowedOrigins = [ const allowedOrigins = [
/^https?:\/\/localhost:(5173|5500|4200|3000|4000)$/, // all the allowed backend ports /^https?:\/\/localhost:(5173|5500|4200|3000|4000)$/, // all the allowed backend ports
/^https?:\/\/.*\.alpla\.net$/, /^https?:\/\/.*\.alpla\.net$/,
env.BETTER_AUTH_URL, // prod env.BETTER_AUTH_URL, // prod
]; ];
app.use( app.use(
cors({ cors({
origin: (origin, callback) => { origin: (origin, callback) => {
//console.log("CORS request from origin:", origin); //console.log("CORS request from origin:", origin);
if (!origin) return callback(null, true); // allow same-site or direct calls if (!origin) return callback(null, true); // allow same-site or direct calls
try { try {
const hostname = new URL(origin).hostname; // strips protocol/port const hostname = new URL(origin).hostname; // strips protocol/port
//console.log("Parsed hostname:", hostname); //console.log("Parsed hostname:", hostname);
if (allowedOrigins.includes(origin)) { if (allowedOrigins.includes(origin)) {
return callback(null, true); return callback(null, true);
} }
// Now this works for *.alpla.net // Now this works for *.alpla.net
if ( if (hostname.endsWith(".alpla.net") || hostname === "alpla.net") {
hostname.endsWith(".alpla.net") || return callback(null, true);
hostname === "alpla.net" }
) { } catch (err) {
return callback(null, true); //console.error("Invalid Origin header:", origin);
} }
} catch (err) {
//console.error("Invalid Origin header:", origin);
}
return callback(new Error("Not allowed by CORS: " + origin)); return callback(new Error("Not allowed by CORS: " + origin));
}, },
methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"], methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
credentials: true, credentials: true,
}) }),
); );
// docs and api stuff // docs and api stuff
app.use( app.use(basePath + "/d", express.static(join(__dirname, "../lstDocs/build")));
basePath + "/d", app.use(
express.static(join(__dirname, "../lstDocs/build")) basePath + "/app",
); express.static(join(__dirname, "../frontend/dist")),
app.use( );
basePath + "/app",
express.static(join(__dirname, "../frontend/dist"))
);
// server setup // server setup
const server = createServer(app); const server = createServer(app);
// register app // register app
setupRoutes(app, basePath); setupRoutes(app, basePath);
// ws stuff // ws stuff
setupIoServer(server, basePath); setupIoServer(server, basePath);
// sub systems // start all systems after we are intiallally up and running
printers(); setTimeout(() => {
schedulerManager(); baseSettings();
printers();
schedulerManager();
// start the server up // start up the v1listener
server.listen(PORT, "0.0.0.0", () => v1Listener();
log.info( }, 5 * 1000);
{ stack: { name: "test" } },
`Server running in ${
process.env.NODE_ENV ? process.env.NODE_ENV : "dev"
}, on http://0.0.0.0:${PORT}${basePath}`
)
);
process.on("uncaughtException", async (err) => { // start the server up
//console.log("Uncaught Exception:", err); server.listen(PORT, "0.0.0.0", () =>
// await closePool(); log.info(
// const emailData = { { stack: { name: "test" } },
// email: "blake.matthes@alpla.com", // should be moved to the db so it can be reused. `Server running in ${
// subject: `${os.hostname()} has just encountered a crash.`, process.env.NODE_ENV ? process.env.NODE_ENV : "dev"
// template: "serverCrash", }, on http://0.0.0.0:${PORT}${basePath}`,
// context: { ),
// error: err, );
// plant: `${os.hostname()}`,
// },
// };
if (!process.env.WEBHOOK_URL) { process.on("uncaughtException", async (err) => {
// await sendEmail(emailData); //console.log("Uncaught Exception:", err);
} else { // await closePool();
log.fatal({ stack: err.stack }, err.message); // const emailData = {
await sendNotify({ // email: "blake.matthes@alpla.com", // should be moved to the db so it can be reused.
module: "system", // subject: `${os.hostname()} has just encountered a crash.`,
subModule: "fatalCrash", // template: "serverCrash",
hostname: os.hostname(), // context: {
message: err.message, // error: err,
stack: err?.stack, // plant: `${os.hostname()}`,
}); // },
} // };
//process.exit(1); if (!process.env.WEBHOOK_URL) {
}); // await sendEmail(emailData);
} else {
log.fatal({ stack: err.stack }, err.message);
await sendNotify({
module: "system",
subModule: "fatalCrash",
hostname: os.hostname(),
message: err.message,
stack: err?.stack,
});
}
// setInterval(() => { //process.exit(1);
// const used = process.memoryUsage(); });
// console.log(
// `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${( // setInterval(() => {
// used.rss / // const used = process.memoryUsage();
// 1024 / // console.log(
// 1024 // `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
// ).toFixed(2)} MB` // used.rss / 1024 / 1024
// ); // ).toFixed(2)} MB`,
// }, 10000); // );
// }, 10000);
}; };
main(); main();

View File

@@ -0,0 +1,22 @@
/**
* This is intended for when running as dev so we can always keep the servers in sync with the main server.
* in the event the server has a change on it we want to make sure we stay in sync
*/
import { createLogger } from "../../../../pkg/logger/logger.js";
/**
 * Dev-only sync entry point: when NODE_ENV is not "production" AND a
 * MAIN_SERVER is configured, kick off a pull of the main server's data so
 * this dev instance stays in sync. Otherwise it is a no-op.
 */
export const mainServerSync = async () => {
  const log = createLogger({ module: "admin", subModule: "main server sync" });
  const isDev = process.env.NODE_ENV?.trim() !== "production";
  if (isDev && process.env.MAIN_SERVER) {
    log.info(
      {},
      "Running in dev and have a main server set we will now pull the servers and look for any changes",
    );
  } else {
    // Fix: the old message claimed "running in production" unconditionally,
    // but this branch is also hit in dev when MAIN_SERVER is simply unset.
    log.info(
      {},
      "No main-server sync: running in production or MAIN_SERVER is not set",
    );
    return;
  }
};

View File

@@ -1,6 +1,6 @@
import type { Express, Request, Response } from "express"; import type { Express, Request, Response } from "express";
import { requireAuth } from "../../pkg/middleware/authMiddleware.js"; import { requireAuth } from "../../pkg/middleware/authMiddleware.js";
import { mainServerSync } from "./controller/servers/matchServers.js";
//admin routes //admin routes
import users from "./routes/getUserRoles.js"; import users from "./routes/getUserRoles.js";
import grantRoles from "./routes/grantRole.js"; import grantRoles from "./routes/grantRole.js";
@@ -30,4 +30,9 @@ export const setupAdminRoutes = (app: Express, basePath: string) => {
requireAuth("user", ["systemAdmin", "admin"]), // will pass bc system admin but this is just telling us we need this requireAuth("user", ["systemAdmin", "admin"]), // will pass bc system admin but this is just telling us we need this
revokeRoles, revokeRoles,
); );
// run the sync only on startup
setTimeout(() => {
mainServerSync();
}, 5 * 1000);
}; };

View File

@@ -1,100 +1,106 @@
import { Router } from "express";
import type { Request, Response } from "express";
import {
insertServerDataSchema,
serverData,
} from "../../../../pkg/db/schema/servers.js";
import { db } from "../../../../pkg/db/db.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import type { DrizzleError } from "drizzle-orm";
import axios from "axios"; import axios from "axios";
import { createLogger } from "../../../../pkg/logger/logger.js"; import { type DrizzleError, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https"; import https from "https";
import { db } from "../../../../pkg/db/db.js";
import {
insertServerDataSchema,
serverData,
} from "../../../../pkg/db/schema/servers.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router(); const router = Router();
router.post("/", async (req: Request, res: Response) => { router.post("/", async (req: Request, res: Response) => {
// when a new server is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there // when a new server is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there
//res.status(200).json({ message: "Server added", ip: req.hostname }); //res.status(200).json({ message: "Server added", ip: req.hostname });
const log = createLogger({ module: "admin", subModule: "add server" }); const log = createLogger({ module: "admin", subModule: "add server" });
const parsed = insertServerDataSchema.safeParse(req.body); const parsed = insertServerDataSchema.safeParse(req.body);
if (!parsed.success) { if (!parsed.success) {
return res.status(400).json({ errors: parsed.error.flatten() }); return res.status(400).json({ errors: parsed.error.flatten() });
} }
const { data, error } = await tryCatch( const { data, error } = await tryCatch(
db db
.insert(serverData) .insert(serverData)
.values(parsed.data) .values({
//.onConflictDoNothing() ...parsed.data,
.returning({ add_user: req.user?.username,
name: serverData.name, add_date: sql`NOW()`,
plantToken: serverData.plantToken, upd_user: req.user?.username,
}) upd_date: sql`NOW()`,
); })
//.onConflictDoNothing()
.returning({
name: serverData.name,
plantToken: serverData.plantToken,
}),
);
if (error) { if (error) {
const err: DrizzleError = error; const err: DrizzleError = error;
return res.status(400).json({ return res.status(400).json({
message: `Error adding the server`, message: `Error adding the server`,
error: err.cause, error: err.cause,
}); });
} }
if (req.hostname === "localhost" && process.env.MAIN_SERVER) { if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
log.info({}, "Running in dev server about to add in a new server"); log.info({}, "Running in dev server about to add in a new server");
const axiosInstance = axios.create({ const axiosInstance = axios.create({
httpsAgent: new https.Agent({ rejectUnauthorized: false }), httpsAgent: new https.Agent({ rejectUnauthorized: false }),
baseURL: process.env.MAIN_SERVER, // e.g. "https://example.com" baseURL: process.env.MAIN_SERVER, // e.g. "https://example.com"
withCredentials: true, withCredentials: true,
}); });
const loginRes = (await axiosInstance.post( const loginRes = (await axiosInstance.post(
`${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`, `${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
{ {
username: process.env.MAIN_SERVER_USERNAME, username: process.env.MAIN_SERVER_USERNAME,
password: process.env.MAIN_SERVER_PASSWORD, password: process.env.MAIN_SERVER_PASSWORD,
}, },
{ {
headers: { "Content-Type": "application/json" }, headers: { "Content-Type": "application/json" },
} },
)) as any; )) as any;
const setCookie = loginRes.headers["set-cookie"][0]; const setCookie = loginRes.headers["set-cookie"][0];
if (!setCookie) { if (!setCookie) {
throw new Error("Did not receive a Set-Cookie header from login"); throw new Error("Did not receive a Set-Cookie header from login");
} }
const { data, error } = await tryCatch( const { data, error } = await tryCatch(
axios.post( axios.post(
`${process.env.MAIN_SERVER}/lst/api/admin/server`, `${process.env.MAIN_SERVER}/lst/api/admin/server`,
parsed.data, parsed.data,
{ {
headers: { headers: {
"Content-Type": "application/json", "Content-Type": "application/json",
Cookie: setCookie.split(";")[0], Cookie: setCookie.split(";")[0],
}, },
withCredentials: true, withCredentials: true,
} },
) ),
); );
if (error) { if (error) {
log.error( log.error(
{ stack: error }, { stack: error },
"There was an error adding the server to Main Server" "There was an error adding the server to Main Server",
); );
} }
log.info( log.info(
{ stack: data?.data }, { stack: data?.data },
"A new Server was just added to the server." "A new Server was just added to the server.",
); );
} }
return res return res
.status(201) .status(201)
.json({ message: `Server ${data[0]?.name} added`, data: data }); .json({ message: `Server ${data[0]?.name} added`, data: data });
}); });
export default router; export default router;

View File

@@ -1,139 +1,141 @@
import { Router } from "express"; import axios from "axios";
import { eq, sql } from "drizzle-orm";
import type { Request, Response } from "express"; import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js"; import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js"; import { serverData } from "../../../../pkg/db/schema/servers.js";
import { eq } from "drizzle-orm";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import axios from "axios";
import { createLogger } from "../../../../pkg/logger/logger.js"; import { createLogger } from "../../../../pkg/logger/logger.js";
import https from "https"; import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router(); const router = Router();
router.patch("/:token", async (req: Request, res: Response) => { router.patch("/:token", async (req: Request, res: Response) => {
const log = createLogger({ module: "admin", subModule: "update server" }); const log = createLogger({ module: "admin", subModule: "update server" });
// when a server is updated and is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there, we want to insert with update on conflict. // when a server is updated and is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there, we want to insert with update on conflict.
const token = req.params.token; const token = req.params.token;
const updates: Record<string, any> = {}; const updates: Record<string, any> = {};
if (req.body?.name !== undefined) { if (req.body?.name !== undefined) {
updates.name = req.body.name; updates.name = req.body.name;
} }
if (req.body?.serverDNS !== undefined) { if (req.body?.serverDNS !== undefined) {
updates.serverDNS = req.body.serverDNS; updates.serverDNS = req.body.serverDNS;
} }
if (req.body?.ipAddress !== undefined) { if (req.body?.ipAddress !== undefined) {
updates.ipAddress = req.body.ipAddress; updates.ipAddress = req.body.ipAddress;
} }
if (req.body?.greatPlainsPlantCode !== undefined) { if (req.body?.greatPlainsPlantCode !== undefined) {
updates.greatPlainsPlantCode = req.body.greatPlainsPlantCode; updates.greatPlainsPlantCode = req.body.greatPlainsPlantCode;
} }
if (req.body?.lstServerPort !== undefined) { if (req.body?.lstServerPort !== undefined) {
updates.lstServerPort = req.body.lstServerPort; updates.lstServerPort = req.body.lstServerPort;
} }
if (req.body?.serverLoc !== undefined) { if (req.body?.serverLoc !== undefined) {
updates.serverLoc = req.body.serverLoc; updates.serverLoc = req.body.serverLoc;
} }
if (req.body?.streetAddress !== undefined) { if (req.body?.streetAddress !== undefined) {
updates.streetAddress = req.body.streetAddress; updates.streetAddress = req.body.streetAddress;
} }
if (req.body?.cityState !== undefined) { if (req.body?.cityState !== undefined) {
updates.cityState = req.body.cityState; updates.cityState = req.body.cityState;
} }
if (req.body?.zipcode !== undefined) { if (req.body?.zipcode !== undefined) {
updates.zipcode = req.body.zipcode; updates.zipcode = req.body.zipcode;
} }
if (req.body?.contactEmail !== undefined) { if (req.body?.contactEmail !== undefined) {
updates.contactEmail = req.body.contactEmail; updates.contactEmail = req.body.contactEmail;
} }
if (req.body?.contactPhone !== undefined) { if (req.body?.contactPhone !== undefined) {
updates.contactPhone = req.body.contactPhone; updates.contactPhone = req.body.contactPhone;
} }
if (req.body?.customerTiAcc !== undefined) { if (req.body?.customerTiAcc !== undefined) {
updates.customerTiAcc = req.body.customerTiAcc; updates.customerTiAcc = req.body.customerTiAcc;
} }
if (req.body?.active !== undefined) { if (req.body?.active !== undefined) {
updates.active = req.body.active; updates.active = req.body.active;
} }
try {
if (Object.keys(updates).length > 0) {
await db
.update(serverData)
.set(updates)
.where(eq(serverData.plantToken, token));
}
if (req.hostname === "localhost" && process.env.MAIN_SERVER) { updates.upd_user = req.user!.username || "lst_user";
log.info({}, "Running in dev server about to add in a new server"); updates.upd_date = sql`NOW()`;
const axiosInstance = axios.create({
httpsAgent: new https.Agent({ rejectUnauthorized: false }),
baseURL: process.env.MAIN_SERVER,
withCredentials: true,
});
const loginRes = (await axiosInstance.post( try {
`${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`, if (Object.keys(updates).length > 0) {
{ await db
username: process.env.MAIN_SERVER_USERNAME, .update(serverData)
password: process.env.MAIN_SERVER_PASSWORD, .set(updates)
}, .where(eq(serverData.plantToken, token));
{ }
headers: { "Content-Type": "application/json" },
}
)) as any;
const setCookie = loginRes?.headers["set-cookie"][0]; if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
log.info({}, "Running in dev server about to add in a new server");
const axiosInstance = axios.create({
httpsAgent: new https.Agent({ rejectUnauthorized: false }),
baseURL: process.env.MAIN_SERVER,
withCredentials: true,
});
//console.log(setCookie.split(";")[0].replace("__Secure-", "")); const loginRes = (await axiosInstance.post(
`${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
{
username: process.env.MAIN_SERVER_USERNAME,
password: process.env.MAIN_SERVER_PASSWORD,
},
{
headers: { "Content-Type": "application/json" },
},
)) as any;
if (!setCookie) { const setCookie = loginRes?.headers["set-cookie"][0];
throw new Error(
"Did not receive a Set-Cookie header from login"
);
}
const { data, error } = await tryCatch( //console.log(setCookie.split(";")[0].replace("__Secure-", ""));
axios.patch(
`${process.env.MAIN_SERVER}/lst/api/admin/server/${token}`,
updates,
{
headers: {
"Content-Type": "application/json",
Cookie: setCookie.split(";")[0],
},
withCredentials: true,
}
)
);
if (error) { if (!setCookie) {
console.log(error); throw new Error("Did not receive a Set-Cookie header from login");
log.error( }
{ stack: error },
"There was an error adding the server to Main Server" const { data, error } = await tryCatch(
); axios.patch(
} `${process.env.MAIN_SERVER}/lst/api/admin/server/${token}`,
log.info( updates,
{ stack: data?.data }, {
"A new Server was just added to the server." headers: {
); "Content-Type": "application/json",
} Cookie: setCookie.split(";")[0],
res.status(200).json({ message: `${token} Server was just updated` }); },
} catch (error) { withCredentials: true,
console.log(error); },
res.status(400).json({ message: "Error Server updated", error }); ),
} );
if (error) {
console.log(error);
log.error(
{ stack: error },
"There was an error adding the server to Main Server",
);
}
log.info(
{ stack: data?.data },
"A new Server was just added to the server.",
);
}
res.status(200).json({ message: `${token} Server was just updated` });
} catch (error) {
console.log(error);
res.status(400).json({ message: "Error Server updated", error });
}
}); });
export default router; export default router;

View File

@@ -0,0 +1,133 @@
/**
* we want to be able to preprint labels from finished lots.
* we will need a lot number
* machine
* printer
* qty will come over as one by default
* copies will come over as 0 by default
* layout
*/
import { createLogger } from "../../../../pkg/logger/logger.js";
import { delay } from "../../../../pkg/utils/delay.js";
import { prodEndpoint } from "../../../../pkg/utils/prodEndpoint.js";
import type { returnFunc } from "../../../../pkg/utils/return.js";
/**
 * Payload for a label preprint request forwarded to the prod warehousing
 * GenerateAndPrintLabel endpoint. All fields are numeric IDs/counts.
 */
export type Preprint = {
  scannerId: number;
  lotNr: number; // finished lot to re-label
  machineId: number; // 457=22, 458=23 — mapping per plant, TODO confirm
  printerId: number; // 457=22, 458=23 — mapping per plant, TODO confirm
  layoutId: number;
  numberOfCopies: number; // copies per label, 0 by default per the route docs
  qtyToPrint: number; // how many distinct labels to generate
};
/**
 * Pre-print one or more labels for a finished lot by calling the prod
 * warehousing GenerateAndPrintLabel endpoint once per label.
 *
 * Returns { success, message, data? } where data is the list of SSCC
 * serial numbers that were printed.
 *
 * Refactor: the original duplicated the entire call/error-handling body
 * between a `qtyToPrint > 1` do/while branch and a single-print branch;
 * both paths are now the same loop (a quantity of 1 simply runs it once).
 * Also fixes the "remaining to print" log, which previously reported the
 * count before incrementing and so was off by one.
 */
export const preprintLabels = async (preprint: Preprint) => {
  const log = createLogger({
    module: "logistics",
    subModule: "preprint",
  });
  const labelsPrinted: number[] = [];
  // Print at least one label even if a caller passes qtyToPrint <= 0.
  const qty = Math.max(preprint.qtyToPrint, 1);
  for (let printed = 0; printed < qty; printed++) {
    const labels = await prodEndpoint(
      "POST",
      "/public/v1.0/Warehousing/GenerateAndPrintLabel",
      {
        scannerId: preprint.scannerId,
        lotNr: preprint.lotNr,
        machineId: preprint.machineId, // 457=22, 458=23
        printerId: preprint.printerId, // 457=22, 458=23
        layoutId: preprint.layoutId,
        numberOfCopies: preprint.numberOfCopies,
      },
    );
    // Result === 1 is a business-level failure reported by the endpoint.
    if (labels?.data.Result === 1) {
      log.error(
        {},
        `There was an error printing the label: ${labels.data.Message}`,
      );
      return {
        success: false,
        message: `${labels.data.Message}`,
      };
    }
    // Transport/endpoint-level failure.
    if (!labels?.success) {
      log.error(
        { error: labels?.data },
        `There was an error printing the label`,
      );
      return {
        success: false,
        message: `${labels?.message}`,
        data: labels?.data,
      };
    }
    // SSCC serial: drop the 10-char prefix and the trailing check digit.
    const serial = parseInt(labels.data.SSCC.slice(10, -1));
    labelsPrinted.push(serial);
    log.info(
      {},
      `Label just created ${serial} and printed, remaining to print ${qty - printed - 1}`,
    );
    // Brief pause between calls so we do not flood the print service.
    await delay(250);
  }
  return {
    success: true,
    message: `${preprint.qtyToPrint} were just printed`,
    data: labelsPrinted,
  };
};

View File

@@ -50,7 +50,7 @@ export const schedulerManager = async () => {
//console.log(data); //console.log(data);
if (orderData.length === 0) { if (orderData.length === 0) {
log.info({}, "There are no new orders or incoming to be updated"); log.debug({}, "There are no new orders or incoming to be updated");
return; return;
} }

View File

@@ -1,16 +1,17 @@
import type { Express, Request, Response } from "express"; import type { Express, Request, Response } from "express";
import { requireAuth } from "../../pkg/middleware/authMiddleware.js"; import labeling from "./routes/labeling/labelingRoutes.js";
import schedule from "./routes/scheduler/scheduleRoutes.js"; import schedule from "./routes/scheduler/scheduleRoutes.js";
export const setupLogisticsRoutes = (app: Express, basePath: string) => { export const setupLogisticsRoutes = (app: Express, basePath: string) => {
app.use(basePath + "/api/logistics/schedule", schedule); app.use(basePath + "/api/logistics/schedule", schedule);
app.use(basePath + "/api/logistics/labeling", labeling);
app.use( // app.use(
basePath + "/api/admin/users", // basePath + "/api/admin/users",
requireAuth("user", ["systemAdmin"]) // will pass bc system admin but this is just telling us we need this // requireAuth("user", ["systemAdmin"]), // will pass bc system admin but this is just telling us we need this
); // );
app.use( // app.use(
basePath + "/api/admin", // basePath + "/api/admin",
requireAuth("user", ["systemAdmin", "admin"]) // will pass bc system admin but this is just telling us we need this // requireAuth("user", ["systemAdmin", "admin"]), // will pass bc system admin but this is just telling us we need this
); // );
}; };

View File

@@ -0,0 +1,8 @@
import { Router } from "express";
import preprint from "./perprint.js";
const router = Router();
router.use("/", preprint);
export default router;

View File

@@ -0,0 +1,27 @@
import type { Request, Response } from "express";
import { Router } from "express";
import z from "zod";
import { preprintLabels } from "../../controller/labeling/preprint.js";
/**
 * Request contract for a preprint job. qtyToPrint defaults to 1 so a
 * payload that omits it prints a single label.
 */
export const Preprint = z.object({
  scannerId: z.number(),
  lotNr: z.number(),
  machineId: z.number(), // 457=22, 458=23
  printerId: z.number(), // 457=22, 458=23
  layoutId: z.number(),
  numberOfCopies: z.number(),
  qtyToPrint: z.number().default(1),
});

const router = Router();

// POST /preprint — validate the body, then hand it to the preprint controller.
router.post("/preprint", async (req: Request, res: Response) => {
  const parsed = Preprint.safeParse(req.body);
  // Fix: the parse result was previously ignored — invalid payloads went
  // straight to the printer and the qtyToPrint default never applied.
  if (!parsed.success) {
    return res.status(400).json({ errors: parsed.error.flatten() });
  }
  const print = await preprintLabels(parsed.data);
  res
    .status(200)
    .json({ success: print.success, message: print.message, data: print.data });
});
export default router;

View File

@@ -0,0 +1,37 @@
/**
* will be all the base settings so we dont have to remember to add ever new setting in these will be the defaults
*/
import { readFileSync } from "fs";
import path from "path";
import { fileURLToPath } from "url";
import { db } from "../../../../pkg/db/db.js";
import { settings } from "../../../../pkg/db/schema/settings.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
// Resolve this module's directory so settings.json can be loaded relative to it.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

/**
 * Seed the settings table with the defaults from settings.json.
 *
 * Existing rows are left untouched (onConflictDoNothing), so this is safe to
 * run on every startup; only genuinely new settings are inserted and logged.
 */
export const baseSettings = async () => {
  const log = createLogger({ module: "system", subModule: "base settings" });
  const settingsPath = path.resolve(__dirname, "./settings.json");

  let newSettings: (typeof settings.$inferInsert)[];
  try {
    newSettings = JSON.parse(readFileSync(settingsPath, "utf-8"));
  } catch (err) {
    // A missing or malformed settings.json previously produced an unhandled
    // rejection at startup; log it and bail out instead.
    log.error({ error: err }, "Could not read or parse settings.json");
    return;
  }

  const { data, error } = await tryCatch(
    db
      .insert(settings)
      .values(newSettings)
      .onConflictDoNothing()
      .returning({ name: settings.name }),
  );
  if (error) {
    log.error({ error }, "There was an error adding new settings");
  }
  if (data) {
    log.info({ newSettingsAdded: data }, "New settings added");
  }
};

View File

@@ -0,0 +1,16 @@
[
{
"name": "plantToken",
"value": "test3",
"description": "The plant token for the plant IE: test3 or usday1",
"moduleName": "system",
"roles": ["systemAdmin"]
},
{
"name": "dbServer",
"value": "usmcd1vms036",
"description": "What is the db server",
"moduleName": "system",
"roles": ["systemAdmin"]
}
]

View File

@@ -1,6 +1,12 @@
import type { Express, Request, Response } from "express"; import type { Express, Request, Response } from "express";
import settings from "./routes/settings/settingRoutes.js";
import stats from "./routes/stats.js"; import stats from "./routes/stats.js";
export const setupSystemRoutes = (app: Express, basePath: string) => { export const setupSystemRoutes = (app: Express, basePath: string) => {
app.use(basePath + "/api/system/stats", stats); app.use(basePath + "/api/system/stats", stats);
app.use(
basePath + "/api/system/settings", // will pass bc system admin but this is just telling us we need this
settings,
);
}; };

View File

@@ -0,0 +1,35 @@
import { Router } from "express";
import type { Request, Response } from "express";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { and, asc, eq } from "drizzle-orm";
const router = Router();

/**
 * GET / — list active servers, optionally narrowed by ?token=<plantToken>,
 * ordered by server name ascending.
 */
router.get("/", async (req: Request, res: Response) => {
  const { token } = req.query;

  // Always restrict to active servers; filter by plant token when one is given.
  const filters =
    token !== undefined
      ? [eq(serverData.plantToken, `${token}`), eq(serverData.active, true)]
      : [eq(serverData.active, true)];

  const query = db
    .select()
    .from(serverData)
    .where(and(...filters))
    .orderBy(asc(serverData.name));

  const { data, error } = await tryCatch(query);
  if (error) {
    return res.status(400).json({ error: error });
  }

  res.status(200).json({ message: "Current Active server", data: data });
});

export default router;

View File

@@ -0,0 +1,11 @@
import { Router } from "express";
import { requireAuth } from "../../../../pkg/middleware/authMiddleware.js";
import getSettings from "./getSettings.js";
import updateSetting from "./updateSetting.js";
// Settings routes: reads are open, writes are gated behind admin roles.
const router = Router();
// GET endpoints — no auth required.
router.use("/", getSettings);
// Mutations require a systemAdmin or admin role on the "system" module.
router.use("/", requireAuth("system", ["systemAdmin", "admin"]), updateSetting);
export default router;

View File

@@ -0,0 +1,141 @@
import axios from "axios";
import { eq, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();

// Whitelist of serverData columns a caller may update through this route.
const UPDATABLE_FIELDS = [
  "name",
  "serverDNS",
  "ipAddress",
  "greatPlainsPlantCode",
  "lstServerPort",
  "serverLoc",
  "streetAddress",
  "cityState",
  "zipcode",
  "contactEmail",
  "contactPhone",
  "customerTiAcc",
  "active",
] as const;

/**
 * PATCH /:token — update a server row identified by its plant token.
 *
 * Only whitelisted body fields are applied. When running on localhost in dev
 * (MAIN_SERVER set), the same update is replayed against the main server so
 * it stays in sync.
 */
router.patch("/:token", async (req: Request, res: Response) => {
  const log = createLogger({ module: "admin", subModule: "update server" });
  const token = req.params.token;

  // Collect only the fields actually supplied in the body.
  const updates: Record<string, any> = {};
  for (const field of UPDATABLE_FIELDS) {
    if (req.body?.[field] !== undefined) {
      updates[field] = req.body[field];
    }
  }
  // Audit columns are always stamped.
  updates.upd_user = req.user?.username || "lst_user";
  updates.upd_date = sql`NOW()`;

  try {
    if (Object.keys(updates).length > 0) {
      await db
        .update(serverData)
        .set(updates)
        .where(eq(serverData.plantToken, token));
    }

    // When a server is updated from a dev box, replay the update to the main
    // server so it can be seen from there as well.
    if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
      log.info({}, "Running in dev server about to add in a new server");
      const axiosInstance = axios.create({
        httpsAgent: new https.Agent({ rejectUnauthorized: false }),
        baseURL: process.env.MAIN_SERVER,
        withCredentials: true,
      });

      const loginRes = (await axiosInstance.post(
        `${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
        {
          username: process.env.MAIN_SERVER_USERNAME,
          password: process.env.MAIN_SERVER_PASSWORD,
        },
        {
          headers: { "Content-Type": "application/json" },
        },
      )) as any;

      // Guard the whole chain: indexing [0] on a missing set-cookie header
      // previously threw before the !setCookie check could run.
      const setCookie = loginRes?.headers?.["set-cookie"]?.[0];
      if (!setCookie) {
        throw new Error("Did not receive a Set-Cookie header from login");
      }

      // Use the TLS-relaxed instance for the replay too; plain axios would
      // reject the main server's self-signed certificate.
      const { data, error } = await tryCatch(
        axiosInstance.patch(
          `${process.env.MAIN_SERVER}/lst/api/admin/server/${token}`,
          updates,
          {
            headers: {
              "Content-Type": "application/json",
              Cookie: setCookie.split(";")[0],
            },
            withCredentials: true,
          },
        ),
      );
      if (error) {
        log.error(
          { stack: error },
          "There was an error adding the server to Main Server",
        );
      }
      if (data) {
        // Only log success when the replay actually succeeded (previously this
        // was logged even after an error).
        log.info(
          { stack: data?.data },
          "A new Server was just added to the server.",
        );
      }
    }

    res.status(200).json({ message: `${token} Server was just updated` });
  } catch (error) {
    log.error({ error }, "Error updating server");
    res.status(400).json({ message: "Error Server updated", error });
  }
});

export default router;

View File

@@ -1,36 +1,40 @@
import { format } from "date-fns-tz";
import { eq } from "drizzle-orm";
import { Router } from "express"; import { Router } from "express";
import { tryCatch } from "../../../pkg/utils/tryCatch.js";
import { db } from "../../../pkg/db/db.js"; import { db } from "../../../pkg/db/db.js";
import { import {
serverStats, type ServerStats,
type ServerStats, serverStats,
} from "../../../pkg/db/schema/serverstats.js"; } from "../../../pkg/db/schema/serverstats.js";
import { eq } from "drizzle-orm"; import { tryCatch } from "../../../pkg/utils/tryCatch.js";
import { format } from "date-fns-tz";
import { checkBuildUpdate } from "../utlis/checkForBuild.js"; import { checkBuildUpdate } from "../utlis/checkForBuild.js";
const router = Router(); const router = Router();
// GET /health // GET /health
router.get("/", async (req, res) => { router.get("/", async (req, res) => {
const { data, error } = await tryCatch( const { data, error } = await tryCatch(
db.select().from(serverStats).where(eq(serverStats.id, "serverStats")) db.select().from(serverStats).where(eq(serverStats.id, "serverStats")),
); );
if (error || !data) { if (error || !data) {
res.status(400).json({ error: error }); res.status(400).json({ error: error });
} }
const statData = data as ServerStats[]; const statData = data as ServerStats[];
res.json({ const used = process.memoryUsage();
status: "ok", res.json({
uptime: process.uptime(), status: "ok",
build: statData[0]?.build, uptime: process.uptime(),
pendingUpdateFile: await checkBuildUpdate(["."]), build: statData[0]?.build,
lastUpdate: statData[0]?.lastUpdate pendingUpdateFile: await checkBuildUpdate(["."]),
? format(statData[0].lastUpdate, "MM/dd/yyyy HH:mm") lastUpdate: statData[0]?.lastUpdate
: "", ? format(statData[0].lastUpdate, "MM/dd/yyyy HH:mm")
}); : "",
memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
used.rss / 1024 / 1024
).toFixed(2)} MB`,
});
}); });
export default router; export default router;

View File

@@ -0,0 +1,27 @@
import {
jsonb,
pgTable,
text,
timestamp,
uniqueIndex,
uuid,
} from "drizzle-orm/pg-core";
// Drizzle table describing named production permissions and which roles hold
// them, with audit columns.
export const prodPermissions = pgTable(
  "prodPermissions",
  {
    prodPerm_id: uuid("prodPerm_id").defaultRandom().primaryKey(),
    name: text("name").notNull(), // unique permission name (see index below)
    description: text("description").notNull(),
    roles: jsonb("roles").default([]), // roles granted this permission
    rolesLegacy: jsonb("rolesLegacy").default([]), // presumably roles carried over from the v1 system — confirm
    add_User: text("add_User").default("LST_System").notNull(),
    add_Date: timestamp("add_Date").defaultNow(),
    // NOTE(review): DB column name "upd_User" (capital U) is inconsistent with
    // "upd_date" below; renaming it is a migration, so it is left as-is here.
    upd_user: text("upd_User").default("LST_System").notNull(),
    upd_date: timestamp("upd_date").defaultNow(),
  },
  (table) => [
    // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
    uniqueIndex("prodPermName").on(table.name),
  ],
);

View File

@@ -1,49 +1,53 @@
import { import {
boolean, boolean,
integer, integer,
pgTable, pgTable,
text, text,
timestamp, timestamp,
uniqueIndex, uniqueIndex,
uuid, uuid,
} from "drizzle-orm/pg-core"; } from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod"; import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import z from "zod"; import z from "zod";
export const serverData = pgTable( export const serverData = pgTable(
"serverData", "serverData",
{ {
server_id: uuid("server_id").defaultRandom().primaryKey(), server_id: uuid("server_id").defaultRandom().primaryKey(),
name: text("name").notNull(), name: text("name").notNull(),
serverDNS: text("serverDNS").notNull(), serverDNS: text("serverDNS").notNull(),
plantToken: text("plantToken").notNull(), plantToken: text("plantToken").notNull(),
ipAddress: text("ipAddress").notNull(), ipAddress: text("ipAddress").notNull(),
greatPlainsPlantCode: integer("greatPlainsPlantCode").notNull(), greatPlainsPlantCode: integer("greatPlainsPlantCode").notNull(),
streetAddress: text("streetAddress"), streetAddress: text("streetAddress"),
cityState: text("cityState"), cityState: text("cityState"),
zipcode: integer("zipcode"), zipcode: integer("zipcode"),
contactEmail: text("contactEmail"), contactEmail: text("contactEmail"),
contactPhone: text("contactPhone"), contactPhone: text("contactPhone"),
customerTiAcc: text("customerTiAcc"), customerTiAcc: text("customerTiAcc"),
lstServerPort: integer("lstServerPort").notNull(), lstServerPort: integer("lstServerPort").notNull(),
active: boolean("active").default(true), active: boolean("active").default(true),
serverLoc: text("serverLoc").notNull(), serverLoc: text("serverLoc").notNull(),
lastUpdated: timestamp("lastUpdated").defaultNow(), lastUpdated: timestamp("lastUpdated").defaultNow(),
isUpgrading: boolean("isUpgrading").default(false), isUpgrading: boolean("isUpgrading").default(false),
}, add_user: text("add_user").default("lst_user"),
add_date: timestamp("add_date").defaultNow(),
upd_user: text("upd_user").default("lst_user"),
upd_date: timestamp("upd_date").defaultNow(),
},
(table) => [ (table) => [
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`), // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
uniqueIndex("plantToken").on(table.plantToken), uniqueIndex("plantToken").on(table.plantToken),
] ],
); );
export const selectServerDataSchema = createSelectSchema(serverData); export const selectServerDataSchema = createSelectSchema(serverData);
export const insertServerDataSchema = createInsertSchema(serverData).extend({ export const insertServerDataSchema = createInsertSchema(serverData).extend({
contactEmail: z.email().optional(), contactEmail: z.email().optional(),
// zipcode: z // zipcode: z
// .string() // .string()
// .regex(/^\d{5}$/) // .regex(/^\d{5}$/)
// .optional(), // .optional(),
}); });

View File

@@ -0,0 +1,70 @@
import { Client } from "pg";
import { createLogger } from "./logger.js";
// Shape of a log row sent over the v1 database NOTIFY payload.
type NewLog = {
  level: string;
  username: string;
  service: string;
  message: string;
  checked: boolean;
  add_Date: Date;
};

/**
 * Connect to the v1 database and bridge its NOTIFY channels into the v2
 * logger / console. Keeps a long-lived pg client LISTENing on the channels.
 */
export const v1Listener = async () => {
  const log = createLogger({ module: "logger", subModule: "Old logging app" });
  const client = new Client({
    connectionString: process.env.DATABASE_URL_V1,
  });
  await client.connect();

  // The notify channels to listen for events on. These are fixed identifiers,
  // so interpolating them into LISTEN is safe.
  const channels = ["logs_channel", "users_channel", "orders_channel"];
  for (const ch of channels) {
    await client.query(`LISTEN ${ch}`);
  }
  console.log("Listening for:", channels.join(", "));

  // Mirror an incoming v1 log row through the v2 logger at the matching level.
  const logEvent = (newLog: string) => {
    let newLogEvent: NewLog;
    try {
      newLogEvent = JSON.parse(newLog);
    } catch (err) {
      // A malformed payload used to throw inside the notification handler and
      // kill the listener; log it and keep listening instead.
      log.error({ payload: newLog, error: err }, "Unparseable v1 log payload");
      return;
    }
    const context = {
      username: newLogEvent.username,
      service: newLogEvent.service,
    };
    // Anything that is not an explicit error is logged at info level (the
    // original "info" case and the default case were identical).
    if (newLogEvent.level === "error") {
      log.error(context, newLogEvent.message);
    } else {
      log.info(context, newLogEvent.message);
    }
  };

  client.on("notification", (msg) => {
    // msg.channel tells which channel it came from;
    // msg.payload is whatever message the trigger sent.
    switch (msg.channel) {
      case "logs_channel":
        logEvent(msg.payload || "");
        break;
      case "users_channel":
        console.log("👤 User event:", msg.payload);
        break;
      case "orders_channel":
        console.log("🛒 Order event:", msg.payload);
        break;
      default:
        console.log("Other event:", msg);
    }
  });
};

View File

@@ -1,90 +1,92 @@
import type { Request, Response, NextFunction } from "express";
import { auth } from "../auth/auth.js";
import { userRoles, type UserRole } from "../db/schema/user_roles.js";
import { db } from "../db/db.js";
import { eq } from "drizzle-orm"; import { eq } from "drizzle-orm";
import type { NextFunction, Request, Response } from "express";
import { auth } from "../auth/auth.js";
import { db } from "../db/db.js";
import { type UserRole, userRoles } from "../db/schema/user_roles.js";
declare global { declare global {
namespace Express { namespace Express {
interface Request { interface Request {
user?: { user?: {
id: string; id: string;
email?: string; email?: string;
roles: Record<string, string[]>; roles: Record<string, string[]>;
}; username?: string | null;
} };
} }
}
} }
function toWebHeaders(nodeHeaders: Request["headers"]): Headers { function toWebHeaders(nodeHeaders: Request["headers"]): Headers {
const h = new Headers(); const h = new Headers();
for (const [key, value] of Object.entries(nodeHeaders)) { for (const [key, value] of Object.entries(nodeHeaders)) {
if (Array.isArray(value)) { if (Array.isArray(value)) {
value.forEach((v) => h.append(key, v)); value.forEach((v) => h.append(key, v));
} else if (value !== undefined) { } else if (value !== undefined) {
h.set(key, value); h.set(key, value);
} }
} }
return h; return h;
} }
export const requireAuth = (moduleName?: string, requiredRoles?: string[]) => { export const requireAuth = (moduleName?: string, requiredRoles?: string[]) => {
return async (req: Request, res: Response, next: NextFunction) => { return async (req: Request, res: Response, next: NextFunction) => {
try { try {
const headers = toWebHeaders(req.headers); const headers = toWebHeaders(req.headers);
// Get session // Get session
const session = await auth.api.getSession({ const session = await auth.api.getSession({
headers, headers,
query: { disableCookieCache: true }, query: { disableCookieCache: true },
}); });
if (!session) { if (!session) {
return res.status(401).json({ error: "No active session" }); return res.status(401).json({ error: "No active session" });
} }
const userId = session.user.id; const userId = session.user.id;
// Get roles // Get roles
const roles = await db const roles = await db
.select() .select()
.from(userRoles) .from(userRoles)
.where(eq(userRoles.userId, userId)); .where(eq(userRoles.userId, userId));
// Organize roles by module // Organize roles by module
const rolesByModule: Record<string, string[]> = {}; const rolesByModule: Record<string, string[]> = {};
for (const r of roles) { for (const r of roles) {
if (!rolesByModule[r.module]) rolesByModule[r.module] = []; if (!rolesByModule[r.module]) rolesByModule[r.module] = [];
rolesByModule[r.module].push(r.role); rolesByModule[r.module].push(r.role);
} }
req.user = { req.user = {
id: userId, id: userId,
email: session.user.email, email: session.user.email,
roles: rolesByModule, roles: rolesByModule,
}; username: session.user.username,
};
// SystemAdmin override // SystemAdmin override
const hasSystemAdmin = Object.values(rolesByModule) const hasSystemAdmin = Object.values(rolesByModule)
.flat() .flat()
.includes("systemAdmin"); .includes("systemAdmin");
// Role check (skip if systemAdmin) // Role check (skip if systemAdmin)
if (requiredRoles?.length && !hasSystemAdmin) { if (requiredRoles?.length && !hasSystemAdmin) {
const moduleRoles = moduleName const moduleRoles = moduleName
? rolesByModule[moduleName] ?? [] ? (rolesByModule[moduleName] ?? [])
: Object.values(rolesByModule).flat(); : Object.values(rolesByModule).flat();
const hasAccess = moduleRoles.some((role) => const hasAccess = moduleRoles.some((role) =>
requiredRoles.includes(role) requiredRoles.includes(role),
); );
if (!hasAccess) { if (!hasAccess) {
return res.status(403).json({ error: "Forbidden" }); return res.status(403).json({ error: "Forbidden" });
} }
} }
next(); next();
} catch (err) { } catch (err) {
console.error("Auth middleware error:", err); console.error("Auth middleware error:", err);
res.status(500).json({ error: "Auth check failed" }); res.status(500).json({ error: "Auth check failed" });
} }
}; };
}; };

View File

@@ -0,0 +1,8 @@
import axios from "axios";
import https from "https";
// Shared axios client for calls to the main server (MAIN_SERVER env var),
// sending cookies with every request.
// NOTE(review): rejectUnauthorized: false disables TLS certificate checks —
// presumably for self-signed internal certs; confirm this is intended outside dev.
export const axiosInstance = axios.create({
  httpsAgent: new https.Agent({ rejectUnauthorized: false }),
  baseURL: process.env.MAIN_SERVER,
  withCredentials: true,
});

View File

@@ -0,0 +1,3 @@
/**
 * Returns a promise that resolves (with no value) after `ms` milliseconds.
 *
 * @param ms - how long to wait, in milliseconds
 */
export const delay = (ms: number): Promise<void> =>
  new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });

View File

@@ -0,0 +1,90 @@
/**
* This will fire the endpoints
* we only need the endpoint url grabbed from swagger
* and the data to be passed over
*/
import axios, { type Method } from "axios";
import { eq } from "drizzle-orm";
import https from "https";
import { db } from "../db/db.js";
import { settings } from "../db/schema/settings.js";
import { createLogger } from "../logger/logger.js";
import { tryCatch } from "./tryCatch.js";
// create the test server stuff
// Known test servers: requests for these plant tokens are routed to the shared
// dev host on a dedicated port instead of the plant's prod URL.
const testServers = [
  { token: "test1", port: 8940 },
  { token: "test2", port: 8941 },
  { token: "test3", port: 8942 },
];

/**
 * Fire a TEC prod endpoint with the shared API key.
 *
 * The target host is derived from the "plantToken" and "dbServer" settings:
 * test tokens go to the dev server on their assigned port, anything else to
 * `<token>prod.alpla.net`.
 *
 * @param method HTTP method to use
 * @param endpoint path portion grabbed from swagger, e.g. "/public/v1.0/..."
 * @param data optional request body
 * @returns { success, message, data } — never undefined
 */
export const prodEndpoint = async <T>(
  method: Method,
  endpoint: string,
  data?: T,
) => {
  const log = createLogger({ module: "pkg", subModule: "prodEndpoints" });
  // example url "https://usmcd1vms036.alpla.net:8942/application/public/v1.0/DemandManagement/ORDERS"

  // Settings lookups return arrays because more than one row could match.
  const plantToken = await db
    .select()
    .from(settings)
    .where(eq(settings.name, "plantToken"));
  const server = await db
    .select()
    .from(settings)
    .where(eq(settings.name, "dbServer"));

  // One pass instead of the previous .some + .filter pair.
  const test = testServers.find((t) => t.token === plantToken[0]?.value);
  const url = test
    ? `https://${server[0]?.value}.alpla.net:${test.port}/application${endpoint}`
    : `https://${plantToken[0]?.value}prod.alpla.net/application${endpoint}`;

  // Axios instance carrying the TEC API key; internal certs are self-signed,
  // so certificate verification is disabled.
  const axiosInstance = axios.create({
    httpsAgent: new https.Agent({ rejectUnauthorized: false }),
    withCredentials: true,
    headers: {
      "X-API-Key": process.env.TEC_API_KEY || "",
      "Content-Type": "application/json",
    },
  });

  const { data: api, error: apiError } = (await tryCatch(
    axiosInstance({
      method,
      url: url,
      ...(data && { data }),
    }),
  )) as any;

  if (apiError) {
    // A network-level failure has no .response; guard the access so the error
    // path itself cannot throw (previously apiError.response.data crashed here).
    log.error(
      { error: apiError?.response?.data },
      "There was an error running the endpoint",
    );
    return {
      success: false,
      message: "There was an error processing the endpoint",
      data: apiError?.response?.data,
    };
  }
  if (api) {
    return {
      success: true,
      message: "Prod endpoint processed",
      data: api.data,
    };
  }
  // tryCatch should always yield data or error; make the fallthrough explicit
  // instead of implicitly returning undefined.
  return {
    success: false,
    message: "There was an error processing the endpoint",
    data: undefined,
  };
};

View File

@@ -1,56 +1,55 @@
import * as React from "react" import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area";
import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area" import * as React from "react";
import { cn } from "../../lib/utils";
import { cn } from "@/lib/utils"
function ScrollArea({ function ScrollArea({
className, className,
children, children,
...props ...props
}: React.ComponentProps<typeof ScrollAreaPrimitive.Root>) { }: React.ComponentProps<typeof ScrollAreaPrimitive.Root>) {
return ( return (
<ScrollAreaPrimitive.Root <ScrollAreaPrimitive.Root
data-slot="scroll-area" data-slot="scroll-area"
className={cn("relative", className)} className={cn("relative", className)}
{...props} {...props}
> >
<ScrollAreaPrimitive.Viewport <ScrollAreaPrimitive.Viewport
data-slot="scroll-area-viewport" data-slot="scroll-area-viewport"
className="focus-visible:ring-ring/50 size-full rounded-[inherit] transition-[color,box-shadow] outline-none focus-visible:ring-[3px] focus-visible:outline-1" className="focus-visible:ring-ring/50 size-full rounded-[inherit] transition-[color,box-shadow] outline-none focus-visible:ring-[3px] focus-visible:outline-1"
> >
{children} {children}
</ScrollAreaPrimitive.Viewport> </ScrollAreaPrimitive.Viewport>
<ScrollBar /> <ScrollBar />
<ScrollAreaPrimitive.Corner /> <ScrollAreaPrimitive.Corner />
</ScrollAreaPrimitive.Root> </ScrollAreaPrimitive.Root>
) );
} }
function ScrollBar({ function ScrollBar({
className, className,
orientation = "vertical", orientation = "vertical",
...props ...props
}: React.ComponentProps<typeof ScrollAreaPrimitive.ScrollAreaScrollbar>) { }: React.ComponentProps<typeof ScrollAreaPrimitive.ScrollAreaScrollbar>) {
return ( return (
<ScrollAreaPrimitive.ScrollAreaScrollbar <ScrollAreaPrimitive.ScrollAreaScrollbar
data-slot="scroll-area-scrollbar" data-slot="scroll-area-scrollbar"
orientation={orientation} orientation={orientation}
className={cn( className={cn(
"flex touch-none p-px transition-colors select-none", "flex touch-none p-px transition-colors select-none",
orientation === "vertical" && orientation === "vertical" &&
"h-full w-2.5 border-l border-l-transparent", "h-full w-2.5 border-l border-l-transparent",
orientation === "horizontal" && orientation === "horizontal" &&
"h-2.5 flex-col border-t border-t-transparent", "h-2.5 flex-col border-t border-t-transparent",
className className,
)} )}
{...props} {...props}
> >
<ScrollAreaPrimitive.ScrollAreaThumb <ScrollAreaPrimitive.ScrollAreaThumb
data-slot="scroll-area-thumb" data-slot="scroll-area-thumb"
className="bg-border relative flex-1 rounded-full" className="bg-border relative flex-1 rounded-full"
/> />
</ScrollAreaPrimitive.ScrollAreaScrollbar> </ScrollAreaPrimitive.ScrollAreaScrollbar>
) );
} }
export { ScrollArea, ScrollBar } export { ScrollArea, ScrollBar };

View File

@@ -1,114 +1,113 @@
import * as React from "react" import * as React from "react";
import { cn } from "../../lib/utils";
import { cn } from "@/lib/utils"
function Table({ className, ...props }: React.ComponentProps<"table">) { function Table({ className, ...props }: React.ComponentProps<"table">) {
return ( return (
<div <div
data-slot="table-container" data-slot="table-container"
className="relative w-full overflow-x-auto" className="relative w-full overflow-x-auto"
> >
<table <table
data-slot="table" data-slot="table"
className={cn("w-full caption-bottom text-sm", className)} className={cn("w-full caption-bottom text-sm", className)}
{...props} {...props}
/> />
</div> </div>
) );
} }
function TableHeader({ className, ...props }: React.ComponentProps<"thead">) { function TableHeader({ className, ...props }: React.ComponentProps<"thead">) {
return ( return (
<thead <thead
data-slot="table-header" data-slot="table-header"
className={cn("[&_tr]:border-b", className)} className={cn("[&_tr]:border-b", className)}
{...props} {...props}
/> />
) );
} }
function TableBody({ className, ...props }: React.ComponentProps<"tbody">) { function TableBody({ className, ...props }: React.ComponentProps<"tbody">) {
return ( return (
<tbody <tbody
data-slot="table-body" data-slot="table-body"
className={cn("[&_tr:last-child]:border-0", className)} className={cn("[&_tr:last-child]:border-0", className)}
{...props} {...props}
/> />
) );
} }
function TableFooter({ className, ...props }: React.ComponentProps<"tfoot">) { function TableFooter({ className, ...props }: React.ComponentProps<"tfoot">) {
return ( return (
<tfoot <tfoot
data-slot="table-footer" data-slot="table-footer"
className={cn( className={cn(
"bg-muted/50 border-t font-medium [&>tr]:last:border-b-0", "bg-muted/50 border-t font-medium [&>tr]:last:border-b-0",
className className,
)} )}
{...props} {...props}
/> />
) );
} }
function TableRow({ className, ...props }: React.ComponentProps<"tr">) { function TableRow({ className, ...props }: React.ComponentProps<"tr">) {
return ( return (
<tr <tr
data-slot="table-row" data-slot="table-row"
className={cn( className={cn(
"hover:bg-muted/50 data-[state=selected]:bg-muted border-b transition-colors", "hover:bg-muted/50 data-[state=selected]:bg-muted border-b transition-colors",
className className,
)} )}
{...props} {...props}
/> />
) );
} }
function TableHead({ className, ...props }: React.ComponentProps<"th">) { function TableHead({ className, ...props }: React.ComponentProps<"th">) {
return ( return (
<th <th
data-slot="table-head" data-slot="table-head"
className={cn( className={cn(
"text-foreground h-10 px-2 text-left align-middle font-medium whitespace-nowrap [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]", "text-foreground h-10 px-2 text-left align-middle font-medium whitespace-nowrap [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]",
className className,
)} )}
{...props} {...props}
/> />
) );
} }
function TableCell({ className, ...props }: React.ComponentProps<"td">) { function TableCell({ className, ...props }: React.ComponentProps<"td">) {
return ( return (
<td <td
data-slot="table-cell" data-slot="table-cell"
className={cn( className={cn(
"p-2 align-middle whitespace-nowrap [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]", "p-2 align-middle whitespace-nowrap [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]",
className className,
)} )}
{...props} {...props}
/> />
) );
} }
function TableCaption({ function TableCaption({
className, className,
...props ...props
}: React.ComponentProps<"caption">) { }: React.ComponentProps<"caption">) {
return ( return (
<caption <caption
data-slot="table-caption" data-slot="table-caption"
className={cn("text-muted-foreground mt-4 text-sm", className)} className={cn("text-muted-foreground mt-4 text-sm", className)}
{...props} {...props}
/> />
) );
} }
export { export {
Table, Table,
TableHeader, TableHeader,
TableBody, TableBody,
TableFooter, TableFooter,
TableHead, TableHead,
TableRow, TableRow,
TableCell, TableCell,
TableCaption, TableCaption,
} };

View File

@@ -10,7 +10,7 @@ interface ShipmentItemProps {
export function ShipmentItem({ export function ShipmentItem({
shipment, shipment,
index = 0, index = 0,
perm = true, //perm = true,
}: ShipmentItemProps) { }: ShipmentItemProps) {
const { setNodeRef, listeners, attributes, transform } = useDraggable({ const { setNodeRef, listeners, attributes, transform } = useDraggable({
id: shipment.orderNumber, id: shipment.orderNumber,

View File

@@ -1,9 +1,7 @@
import { createFileRoute } from "@tanstack/react-router"; import { createFileRoute } from "@tanstack/react-router";
import { useEffect, useState } from "react";
import { coreSocket } from "../../../lib/socket.io/socket"; import { coreSocket } from "../../../lib/socket.io/socket";
import "../-components/style.css"; import "../-components/style.css";
import moment from "moment";
import Timeline from "react-calendar-timeline";
export const Route = createFileRoute("/(logistics)/logistics/deliverySchedule")( export const Route = createFileRoute("/(logistics)/logistics/deliverySchedule")(
{ {
@@ -19,9 +17,9 @@ export const Route = createFileRoute("/(logistics)/logistics/deliverySchedule")(
function RouteComponent() { function RouteComponent() {
// connect to the channel // connect to the channel
const [shipments, setShipments] = useState([]) as any; //const [shipments, setShipments] = useState([]) as any;
//const [perm] = useState(true); // will check this for sure with a user permissions //const [perm] = useState(true); // will check this for sure with a user permissions
const [loaded, setLoaded] = useState(false); //const [loaded, setLoaded] = useState(false);
// useEffect(() => { // useEffect(() => {
// const handleConnect = () => { // const handleConnect = () => {

View File

@@ -11,7 +11,7 @@ import { useEffect } from "react";
import { Toaster } from "sonner"; import { Toaster } from "sonner";
import Nav from "../components/navBar/Nav"; import Nav from "../components/navBar/Nav";
import SideBarNav from "../components/navBar/SideBarNav"; import SideBarNav from "../components/navBar/SideBarNav";
import { SidebarProvider, SidebarTrigger } from "../components/ui/sidebar"; import { SidebarProvider } from "../components/ui/sidebar";
import { userAccess } from "../lib/authClient"; import { userAccess } from "../lib/authClient";
import { SessionGuard } from "../lib/providers/SessionProvider"; import { SessionGuard } from "../lib/providers/SessionProvider";
import { ThemeProvider } from "../lib/providers/theme-provider"; import { ThemeProvider } from "../lib/providers/theme-provider";

View File

@@ -91,6 +91,10 @@ function RouteComponent() {
); );
}, },
}), }),
// password reset will do the email flow
// change password an input for this one so well need inline editing for this dope one
// trash can to delete user
// last login -- need to get working on the server side as well.
columnHelper.accessor("roles", { columnHelper.accessor("roles", {
header: () => <span>Roles</span>, header: () => <span>Roles</span>,
cell: ({ row }) => { cell: ({ row }) => {

View File

@@ -0,0 +1,12 @@
-- Notify listeners on 'logs_channel' with the full inserted row as JSON.
-- Consumed by the v1 listener bridge on the application side.
CREATE OR REPLACE FUNCTION notify_new_log()
RETURNS trigger AS $$
BEGIN
PERFORM pg_notify('logs_channel', row_to_json(NEW)::text);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Fire the notification after every insert into the logs table.
CREATE TRIGGER logs_notify_trigger
AFTER INSERT ON logs
FOR EACH ROW
EXECUTE FUNCTION notify_new_log();

View File

@@ -1,97 +1,97 @@
{ {
"name": "lstv2", "name": "lstv2",
"version": "2.27.0", "version": "2.27.0",
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "concurrently -n \"server,frontend\" -c \"#007755,#2f6da3\" \"npm run dev:server\" \"cd frontend && npm run dev\"", "dev": "concurrently -n \"server,frontend\" -c \"#007755,#2f6da3\" \"npm run dev:server\" \"cd frontend && npm run dev\"",
"dev:server": "dotenvx run -f .env -- tsx watch server/index.ts", "dev:server": "dotenvx run -f .env -- tsx watch server/index.ts",
"dev:frontend": "cd frontend && npm run dev", "dev:frontend": "cd frontend && npm run dev",
"dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts", "dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts",
"dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts", "dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts",
"build": "npm run build:server && npm run build:frontend", "build": "npm run build:server && npm run build:frontend",
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y", "build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y ",
"build:frontend": "cd frontend && npm run build", "build:frontend": "cd frontend && npm run build",
"build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild", "build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild",
"copy:scripts": "tsx server/scripts/copyScripts.ts", "copy:scripts": "tsx server/scripts/copyScripts.ts",
"copy:servers": "xcopy server\\services\\server\\utils\\serverData.json dist\\server\\services\\server\\utils /E /I /Y", "copy:servers": "xcopy server\\services\\server\\utils\\serverData.json dist\\server\\services\\server\\utils /E /I /Y",
"start": "set NODE_ENV=production && npm run start:server", "start": "set NODE_ENV=production && npm run start:server",
"start:server": "dotenvx run -f .env -- node dist/server/index.js", "start:server": "dotenvx run -f .env -- node dist/server/index.js",
"db:generate": "npx drizzle-kit generate", "db:generate": "npx drizzle-kit generate",
"db:migrate": "npx drizzle-kit push", "db:migrate": "npx drizzle-kit push",
"db:dev": "npm run build && npm run db:generate && npm run db:migrate", "db:dev": "npm run build && npm run db:generate && npm run db:migrate",
"deploy": "standard-version --conventional-commits && npm run build", "deploy": "standard-version --conventional-commits && npm run build",
"zipServer": "dotenvx run -f .env -- tsx server/scripts/zipUpBuild.ts \"C:\\Users\\matthes01\\Documents\\lstv2\"", "zipServer": "dotenvx run -f .env -- tsx server/scripts/zipUpBuild.ts \"C:\\Users\\matthes01\\Documents\\lstv2\"",
"newBuild": "npm run build:server && npm run build:frontend", "newBuild": "npm run build:server && npm run build:frontend",
"copyToNew": "powershell -ExecutionPolicy Bypass -File server/scripts/copyToLst.ps1 -dir \"C:\\Users\\matthes01\\Documents\\lstv2\"", "copyToNew": "powershell -ExecutionPolicy Bypass -File server/scripts/copyToLst.ps1 -dir \"C:\\Users\\matthes01\\Documents\\lstv2\"",
"removeOld": "rimraf dist && rimraf frontend/dist", "removeOld": "rimraf dist && rimraf frontend/dist",
"prodBuild": "npm run v1Build && npm run build && npm run zipServer && npm run dev", "prodBuild": "npm run v1Build && npm run build && npm run zipServer && npm run dev",
"commit": "cz", "commit": "cz",
"prodinstall": "npm i --omit=dev && npm run db:migrate", "prodinstall": "npm i --omit=dev && npm run db:migrate",
"checkupdates": "npx npm-check-updates", "checkupdates": "npx npm-check-updates",
"testingCode": "dotenvx run -f .env -- tsx watch server/services/logistics/controller/warehouse/cycleCountChecks/cyclecountCheck.ts" "testingCode": "dotenvx run -f .env -- tsx watch server/services/logistics/controller/warehouse/cycleCountChecks/cyclecountCheck.ts"
}, },
"config": { "config": {
"commitizen": { "commitizen": {
"path": "./node_modules/cz-conventional-changelog" "path": "./node_modules/cz-conventional-changelog"
} }
}, },
"admConfig": { "admConfig": {
"build": 661, "build": 661,
"oldBuild": "backend-0.1.3.zip" "oldBuild": "backend-0.1.3.zip"
}, },
"devDependencies": { "devDependencies": {
"@types/adm-zip": "^0.5.7", "@types/adm-zip": "^0.5.7",
"@types/bcrypt": "^5.0.2", "@types/bcrypt": "^5.0.2",
"@types/fs-extra": "^11.0.4", "@types/fs-extra": "^11.0.4",
"@types/js-cookie": "^3.0.6", "@types/js-cookie": "^3.0.6",
"@types/mssql": "^9.1.7", "@types/mssql": "^9.1.7",
"@types/node": "^24.0.3", "@types/node": "^24.0.3",
"@types/node-cron": "^3.0.11", "@types/node-cron": "^3.0.11",
"@types/nodemailer": "^6.4.17", "@types/nodemailer": "^6.4.17",
"@types/pg": "^8.15.4", "@types/pg": "^8.15.4",
"@types/ws": "^8.18.1", "@types/ws": "^8.18.1",
"concurrently": "^9.1.2", "concurrently": "^9.1.2",
"cz-conventional-changelog": "^3.3.0", "cz-conventional-changelog": "^3.3.0",
"standard-version": "^9.5.0", "standard-version": "^9.5.0",
"tsx": "^4.20.3", "tsx": "^4.20.3",
"typescript": "^5.8.3" "typescript": "^5.8.3"
}, },
"dependencies": { "dependencies": {
"@dotenvx/dotenvx": "^1.45.1", "@dotenvx/dotenvx": "^1.45.1",
"@hono/node-server": "^1.14.4", "@hono/node-server": "^1.14.4",
"@hono/zod-openapi": "^0.19.8", "@hono/zod-openapi": "^0.19.8",
"@scalar/hono-api-reference": "^0.9.5", "@scalar/hono-api-reference": "^0.9.5",
"@tanstack/react-form": "^1.12.3", "@tanstack/react-form": "^1.12.3",
"@tanstack/react-table": "^8.21.3", "@tanstack/react-table": "^8.21.3",
"@types/jsonwebtoken": "^9.0.10", "@types/jsonwebtoken": "^9.0.10",
"@types/nodemailer-express-handlebars": "^4.0.5", "@types/nodemailer-express-handlebars": "^4.0.5",
"adm-zip": "^0.5.16", "adm-zip": "^0.5.16",
"axios": "^1.10.0", "axios": "^1.10.0",
"bcryptjs": "^3.0.2", "bcryptjs": "^3.0.2",
"croner": "^9.1.0", "croner": "^9.1.0",
"date-fns": "^4.1.0", "date-fns": "^4.1.0",
"date-fns-tz": "^3.2.0", "date-fns-tz": "^3.2.0",
"drizzle-kit": "^0.31.1", "drizzle-kit": "^0.31.1",
"drizzle-orm": "^0.44.2", "drizzle-orm": "^0.44.2",
"drizzle-zod": "^0.8.2", "drizzle-zod": "^0.8.2",
"excel-date-to-js": "^1.1.5", "excel-date-to-js": "^1.1.5",
"fast-xml-parser": "^5.2.5", "fast-xml-parser": "^5.2.5",
"fs-extra": "^11.3.0", "fs-extra": "^11.3.0",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "^9.0.2",
"morgan": "^1.10.1", "morgan": "^1.10.1",
"mssql": "^11.0.1", "mssql": "^11.0.1",
"nodemailer": "^7.0.3", "nodemailer": "^7.0.3",
"nodemailer-express-handlebars": "^7.0.0", "nodemailer-express-handlebars": "^7.0.0",
"pg": "^8.16.2", "pg": "^8.16.2",
"pino": "^9.7.0", "pino": "^9.7.0",
"pino-abstract-transport": "^2.0.0", "pino-abstract-transport": "^2.0.0",
"pino-pretty": "^13.0.0", "pino-pretty": "^13.0.0",
"postgres": "^3.4.7", "postgres": "^3.4.7",
"react-resizable-panels": "^3.0.3", "react-resizable-panels": "^3.0.3",
"rimraf": "^6.0.1", "rimraf": "^6.0.1",
"st-ethernet-ip": "^2.7.5", "st-ethernet-ip": "^2.7.5",
"ws": "^8.18.2", "ws": "^8.18.2",
"xlsx": "^0.18.5", "xlsx": "^0.18.5",
"zod": "^3.25.67" "zod": "^3.25.67"
} }
} }

View File

@@ -0,0 +1,17 @@
CREATE TABLE "prodPermissions" (
"prodPerm_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"name" text NOT NULL,
"description" text NOT NULL,
"roles" jsonb DEFAULT '[]'::jsonb,
"rolesLegacy" jsonb DEFAULT '[]'::jsonb,
"add_User" text DEFAULT 'LST_System' NOT NULL,
"add_Date" timestamp DEFAULT now(),
"upd_User" text DEFAULT 'LST_System' NOT NULL,
"upd_date" timestamp DEFAULT now()
);
--> statement-breakpoint
ALTER TABLE "serverData" ADD COLUMN "add_user" text DEFAULT 'lst_user';--> statement-breakpoint
ALTER TABLE "serverData" ADD COLUMN "add_date" timestamp DEFAULT now();--> statement-breakpoint
ALTER TABLE "serverData" ADD COLUMN "upd_user" text DEFAULT 'lst_user';--> statement-breakpoint
ALTER TABLE "serverData" ADD COLUMN "upd_date" timestamp DEFAULT now();--> statement-breakpoint
CREATE UNIQUE INDEX "prodPermName" ON "prodPermissions" USING btree ("name");

File diff suppressed because it is too large Load Diff

View File

@@ -134,6 +134,13 @@
"when": 1760480733009, "when": 1760480733009,
"tag": "0018_aspiring_silver_samurai", "tag": "0018_aspiring_silver_samurai",
"breakpoints": true "breakpoints": true
},
{
"idx": 19,
"version": "7",
"when": 1760623729227,
"tag": "0019_bizarre_tarot",
"breakpoints": true
} }
] ]
} }

13
package-lock.json generated
View File

@@ -42,6 +42,7 @@
"@types/node": "^24.7.1", "@types/node": "^24.7.1",
"@types/nodemailer": "^7.0.2", "@types/nodemailer": "^7.0.2",
"@types/nodemailer-express-handlebars": "^4.0.5", "@types/nodemailer-express-handlebars": "^4.0.5",
"@types/pg": "^8.15.5",
"concurrently": "^9.2.1", "concurrently": "^9.2.1",
"cz-conventional-changelog": "^3.3.0", "cz-conventional-changelog": "^3.3.0",
"standard-version": "^9.5.0", "standard-version": "^9.5.0",
@@ -3384,6 +3385,18 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/@types/pg": {
"version": "8.15.5",
"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.15.5.tgz",
"integrity": "sha512-LF7lF6zWEKxuT3/OR8wAZGzkg4ENGXFNyiV/JeOt9z5B+0ZVwbql9McqX5c/WStFq1GaGso7H1AzP/qSzmlCKQ==",
"devOptional": true,
"license": "MIT",
"dependencies": {
"@types/node": "*",
"pg-protocol": "*",
"pg-types": "^2.2.0"
}
},
"node_modules/@types/qs": { "node_modules/@types/qs": {
"version": "6.14.0", "version": "6.14.0",
"dev": true, "dev": true,

View File

@@ -1,90 +1,91 @@
{ {
"name": "lst", "name": "lst",
"version": "1.5.0", "version": "1.5.0",
"description": "Logistics support tool - the place where the support happens.", "description": "Logistics support tool - the place where the support happens.",
"main": "index.js", "main": "index.js",
"scripts": { "scripts": {
"test": "echo \"Error: no test specified\" && exit 1", "test": "echo \"Error: no test specified\" && exit 1",
"dev:app": "dotenvx run -f .env -- tsx watch app/main.ts", "dev:app": "dotenvx run -f .env -- tsx watch app/main.ts",
"dev:docs": "npm run translateDocs && cd lstDocs && npm start", "dev:docs": "npm run translateDocs && cd lstDocs && npm start",
"dev:front": "cd frontend && npm run dev", "dev:front": "cd frontend && npm run dev",
"dev:db:migrate": "npx drizzle-kit push", "dev:db:migrate": "npx drizzle-kit push",
"dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts", "dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts",
"dev": "concurrently -n \"server,frontend,docs\" -c \"#007755,#2f6da3,#DB4FE0\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\"", "dev": "concurrently -n \"server,frontend,docs\" -c \"#007755,#2f6da3,#DB4FE0\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\"",
"copy:docs": "node scripts/lstDocCopy.mjs", "copy:docs": "node scripts/lstDocCopy.mjs",
"build:app": "rimraf dist && npx tsc", "build:app": "rimraf dist && npx tsc && xcopy app\\src\\internal\\system\\controller\\settings\\settings.json dist\\src\\internal\\system\\controller\\settings /E /I /Y",
"build:front": "cd frontend && rimraf dist && npm run build", "build:front": "cd frontend && rimraf dist && npm run build",
"build:docs": "cd lstDocs && rimraf build && npm run build", "build:docs": "cd lstDocs && rimraf build && npm run build",
"build:wrapper": "cd lstWrapper && rimraf publish && dotnet publish -c Release -o ./publish", "build:wrapper": "cd lstWrapper && rimraf publish && dotnet publish -c Release -o ./publish",
"build:ctl": " ", "build:ctl": " ",
"build": "npm run translateDocs && npm run build:docs && npm run build:front && npm run build:app", "build": "npm run translateDocs && npm run build:docs && npm run build:front && npm run build:app",
"install:front": "cd frontend && npm i", "install:front": "cd frontend && npm i",
"install:docs": "cd lstDocs && npm i", "install:docs": "cd lstDocs && npm i",
"install:app": "npm i", "install:app": "npm i",
"start:app": "node dist/main.js", "start:app": "node dist/main.js",
"start": "dotenvx run -f .env -- npm run start:app", "start": "dotenvx run -f .env -- npm run start:app",
"start:win": "set NODE_ENV=production && node dist/main.js", "start:win": "set NODE_ENV=production && node dist/main.js",
"docker": "docker compose up --build --force-recreate -d", "docker": "docker compose up --build --force-recreate -d",
"commit": "cz", "commit": "cz",
"deploy": "standard-version --conventional-commits && npm run translateDocs && npm run build && cd lstV2 && npm run build", "deploy": "standard-version --conventional-commits && npm run translateDocs && npm run build && cd lstV2 && npm run build",
"db:migrate": "npx drizzle-kit push", "db:migrate": "npx drizzle-kit push",
"db:generate": "npx drizzle-kit generate", "db:generate": "npx drizzle-kit generate",
"translateDocs": "cd scripts && node translateScript.js", "translateDocs": "cd scripts && node translateScript.js",
"auth:generate": "npx @better-auth/cli generate --config ./app/src/pkg/auth/auth.ts", "auth:generate": "npx @better-auth/cli generate --config ./app/src/pkg/auth/auth.ts",
"updates": "ncu -g" "updates": "ncu -g"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://git.tuffraid.net/cowch/lst.git" "url": "https://git.tuffraid.net/cowch/lst.git"
}, },
"keywords": [], "keywords": [],
"author": "", "author": "",
"license": "ISC", "license": "ISC",
"type": "module", "type": "module",
"dependencies": { "dependencies": {
"@dotenvx/dotenvx": "^1.51.0", "@dotenvx/dotenvx": "^1.51.0",
"@tanstack/react-table": "^8.21.3", "@tanstack/react-table": "^8.21.3",
"@types/cors": "^2.8.19", "@types/cors": "^2.8.19",
"axios": "^1.12.2", "axios": "^1.12.2",
"better-auth": "^1.3.27", "better-auth": "^1.3.27",
"cors": "^2.8.5", "cors": "^2.8.5",
"date-fns": "^4.1.0", "date-fns": "^4.1.0",
"date-fns-tz": "^3.2.0", "date-fns-tz": "^3.2.0",
"drizzle-kit": "^0.31.5", "drizzle-kit": "^0.31.5",
"drizzle-orm": "^0.44.6", "drizzle-orm": "^0.44.6",
"drizzle-zod": "^0.8.3", "drizzle-zod": "^0.8.3",
"express": "^5.1.0", "express": "^5.1.0",
"handlebars": "^4.7.8", "handlebars": "^4.7.8",
"morgan": "^1.10.1", "morgan": "^1.10.1",
"mssql": "^12.0.0", "mssql": "^12.0.0",
"nodemailer": "^7.0.9", "nodemailer": "^7.0.9",
"nodemailer-express-handlebars": "^7.0.0", "nodemailer-express-handlebars": "^7.0.0",
"npm-check-updates": "^19.0.0", "npm-check-updates": "^19.0.0",
"pg": "^8.16.3", "pg": "^8.16.3",
"pino": "^10.0.0", "pino": "^10.0.0",
"pino-pretty": "^13.1.2", "pino-pretty": "^13.1.2",
"postgres": "^3.4.7", "postgres": "^3.4.7",
"socket.io": "^4.8.1", "socket.io": "^4.8.1",
"zod": "^4.1.12" "zod": "^4.1.12"
}, },
"devDependencies": { "devDependencies": {
"@biomejs/biome": "2.2.6", "@biomejs/biome": "2.2.6",
"@types/express": "^5.0.3", "@types/express": "^5.0.3",
"@types/morgan": "^1.9.10", "@types/morgan": "^1.9.10",
"@types/mssql": "^9.1.8", "@types/mssql": "^9.1.8",
"@types/node": "^24.7.1", "@types/node": "^24.7.1",
"@types/nodemailer": "^7.0.2", "@types/nodemailer": "^7.0.2",
"@types/nodemailer-express-handlebars": "^4.0.5", "@types/nodemailer-express-handlebars": "^4.0.5",
"concurrently": "^9.2.1", "@types/pg": "^8.15.5",
"cz-conventional-changelog": "^3.3.0", "concurrently": "^9.2.1",
"standard-version": "^9.5.0", "cz-conventional-changelog": "^3.3.0",
"ts-node-dev": "^2.0.0", "standard-version": "^9.5.0",
"tsx": "^4.20.6", "ts-node-dev": "^2.0.0",
"typescript": "^5.9.3" "tsx": "^4.20.6",
}, "typescript": "^5.9.3"
"config": { },
"commitizen": { "config": {
"path": "./node_modules/cz-conventional-changelog" "commitizen": {
} "path": "./node_modules/cz-conventional-changelog"
} }
}
} }