Compare commits

..

6 Commits

44 changed files with 2937 additions and 827 deletions

View File

@@ -12,6 +12,9 @@ LOG_LEVEL=debug
# alpaprod tec apiKey
TEC_API_KEY=api key
# v1 listener
DATABASE_URL_V1=postgresql://ausername:password@localhost:5433/lst_db
# postgres connection
DATABASE_HOST=localhost
DATABASE_PORT=5432

16
.vscode/settings.json vendored
View File

@@ -1,9 +1,11 @@
{
"editor.defaultFormatter": "biomejs.biome",
"workbench.colorTheme": "Default Dark+",
"terminal.integrated.env.windows": {},
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {"source.fixAll.biome": "explicit",
"source.organizeImports.biome": "explicit" },
"cSpell.words": ["alpla", "alplamart", "alplaprod", "ppoo"]
"editor.defaultFormatter": "biomejs.biome",
"workbench.colorTheme": "Default Dark+",
"terminal.integrated.env.windows": {},
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll.biome": "explicit",
"source.organizeImports.biome": "explicit"
},
"cSpell.words": ["alpla", "alplamart", "alplaprod", "intiallally", "ppoo"]
}

View File

@@ -4,8 +4,8 @@ meta {
seq: 3
}
post {
url: {{url}}/lst/api/admin/:userID/grant
patch {
url: {{url}}/lst/api/admin/:userID/revoke
body: json
auth: inherit
}
@@ -16,8 +16,8 @@ params:path {
body:json {
{
"module":"siloAdjustments",
"role":"viewer"
"module":"ocp"
}
}

View File

@@ -0,0 +1,8 @@
meta {
name: logistics
seq: 4
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,28 @@
meta {
name: Preprint
type: http
seq: 1
}
post {
url: {{url}}/lst/api/logistics/labeling/preprint
body: json
auth: inherit
}
body:json {
{
"scannerId": 999,
"lotNr": 26321,
"machineId": 3, // 457=22, 458=23
"printerId": 7, // 457=22, 458=23
"layoutId": 25,
"numberOfCopies": 0,
"qtyToPrint": 5
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: labeling
seq: 1
}
auth {
mode: inherit
}

View File

@@ -1,7 +1,7 @@
vars {
url: http://localhost:4200
url: https://usmcd1vms036.alpla.net
session_cookie:
urlv2: http://usiow1vms006:3001
urlv2: http://usmcd1vms036:3000
jwtV2:
}
vars:secret [

View File

@@ -1,200 +1,201 @@
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
import { toNodeHandler } from "better-auth/node";
import cors from "cors";
import express from "express";
import morgan from "morgan";
import { createServer } from "http";
import { setupRoutes } from "./src/internal/routerHandler/routeHandler.js";
import { printers } from "./src/internal/ocp/printers/printers.js";
import morgan from "morgan";
import os from "os";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import { schedulerManager } from "./src/internal/logistics/controller/schedulerManager.js";
import { printers } from "./src/internal/ocp/printers/printers.js";
import { setupRoutes } from "./src/internal/routerHandler/routeHandler.js";
import { baseSettings } from "./src/internal/system/controller/settings/baseSettings.js";
import { auth } from "./src/pkg/auth/auth.js";
import { db } from "./src/pkg/db/db.js";
import { settings } from "./src/pkg/db/schema/settings.js";
import { validateEnv } from "./src/pkg/utils/envValidator.js";
import { createLogger } from "./src/pkg/logger/logger.js";
import { returnFunc } from "./src/pkg/utils/return.js";
import { initializeProdPool } from "./src/pkg/prodSql/prodSqlConnect.js";
import { tryCatch } from "./src/pkg/utils/tryCatch.js";
import os from "os";
import cors from "cors";
import { sendNotify } from "./src/pkg/utils/notify.js";
import { toNodeHandler } from "better-auth/node";
import { auth } from "./src/pkg/auth/auth.js";
import { v1Listener } from "./src/pkg/logger/v1Listener.js";
import { apiHitMiddleware } from "./src/pkg/middleware/apiHits.js";
import { initializeProdPool } from "./src/pkg/prodSql/prodSqlConnect.js";
import { validateEnv } from "./src/pkg/utils/envValidator.js";
import { sendNotify } from "./src/pkg/utils/notify.js";
import { returnFunc } from "./src/pkg/utils/return.js";
import { tryCatch } from "./src/pkg/utils/tryCatch.js";
import { setupIoServer } from "./src/ws/server.js";
import { schedulerManager } from "./src/internal/logistics/controller/schedulerManager.js";
const main = async () => {
const env = validateEnv(process.env);
const PORT = Number(env.VITE_PORT) || 4200;
const env = validateEnv(process.env);
const PORT = Number(env.VITE_PORT) || 4200;
//create the logger
const log = createLogger({ module: "system", subModule: "main start" });
//create the logger
const log = createLogger({ module: "system", subModule: "main start" });
// base path
let basePath: string = "";
// base path
let basePath: string = "";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Db connection stuff
const res = await tryCatch(db.select().from(settings));
// Db connection stuff
const res = await tryCatch(db.select().from(settings));
if (res.error) {
return returnFunc({
success: false,
module: "system",
level: "fatal",
message: `Database lookup failed`,
notify: false,
data: [],
});
}
if (res.error) {
return returnFunc({
success: false,
module: "system",
level: "fatal",
message: `Database lookup failed`,
notify: false,
data: [],
});
}
if (res.data.length === 0) {
//return
// returnFunc({
// success: false,
// module: "system",
// level: "fatal",
// message: `This seems to be the first time you have started the app please validate the settings have been intiated`,
// notify: false,
// data: [],
// });
}
if (res.data.length === 0) {
//return
// returnFunc({
// success: false,
// module: "system",
// level: "fatal",
// message: `This seems to be the first time you have started the app please validate the settings have been intiated`,
// notify: false,
// data: [],
// });
}
// connect to the prod sql
await initializeProdPool();
// connect to the prod sql
await initializeProdPool();
// express app
const app = express();
// express app
const app = express();
// global env that run only in dev
if (process.env.NODE_ENV?.trim() !== "production") {
app.use(morgan("tiny"));
basePath = "/lst";
// global env that run only in dev
if (process.env.NODE_ENV?.trim() !== "production") {
app.use(morgan("tiny"));
basePath = "/lst";
app.use(
basePath + "/test",
express.static(join(__dirname, "../controller"))
);
}
app.use(
basePath + "/test",
express.static(join(__dirname, "../controller")),
);
}
// global middleware
app.set("trust proxy", true);
app.use(apiHitMiddleware);
app.all(basePath + "/api/auth/*splat", toNodeHandler(auth)); // sign-in sign-out
app.use(express.json());
// global middleware
app.set("trust proxy", true);
app.use(apiHitMiddleware);
app.all(basePath + "/api/auth/*splat", toNodeHandler(auth)); // sign-in sign-out
app.use(express.json());
const allowedOrigins = [
/^https?:\/\/localhost:(5173|5500|4200|3000|4000)$/, // all the allowed backend ports
/^https?:\/\/.*\.alpla\.net$/,
env.BETTER_AUTH_URL, // prod
];
const allowedOrigins = [
/^https?:\/\/localhost:(5173|5500|4200|3000|4000)$/, // all the allowed backend ports
/^https?:\/\/.*\.alpla\.net$/,
env.BETTER_AUTH_URL, // prod
];
app.use(
cors({
origin: (origin, callback) => {
//console.log("CORS request from origin:", origin);
app.use(
cors({
origin: (origin, callback) => {
//console.log("CORS request from origin:", origin);
if (!origin) return callback(null, true); // allow same-site or direct calls
if (!origin) return callback(null, true); // allow same-site or direct calls
try {
const hostname = new URL(origin).hostname; // strips protocol/port
//console.log("Parsed hostname:", hostname);
try {
const hostname = new URL(origin).hostname; // strips protocol/port
//console.log("Parsed hostname:", hostname);
if (allowedOrigins.includes(origin)) {
return callback(null, true);
}
if (allowedOrigins.includes(origin)) {
return callback(null, true);
}
// Now this works for *.alpla.net
if (
hostname.endsWith(".alpla.net") ||
hostname === "alpla.net"
) {
return callback(null, true);
}
} catch (err) {
//console.error("Invalid Origin header:", origin);
}
// Now this works for *.alpla.net
if (hostname.endsWith(".alpla.net") || hostname === "alpla.net") {
return callback(null, true);
}
} catch (err) {
//console.error("Invalid Origin header:", origin);
}
return callback(new Error("Not allowed by CORS: " + origin));
},
methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
credentials: true,
})
);
return callback(new Error("Not allowed by CORS: " + origin));
},
methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
credentials: true,
}),
);
// docs and api stuff
app.use(
basePath + "/d",
express.static(join(__dirname, "../lstDocs/build"))
);
app.use(
basePath + "/app",
express.static(join(__dirname, "../frontend/dist"))
);
// docs and api stuff
app.use(basePath + "/d", express.static(join(__dirname, "../lstDocs/build")));
app.use(
basePath + "/app",
express.static(join(__dirname, "../frontend/dist")),
);
// server setup
const server = createServer(app);
// server setup
const server = createServer(app);
// register app
setupRoutes(app, basePath);
// register app
setupRoutes(app, basePath);
// ws stuff
setupIoServer(server, basePath);
// ws stuff
setupIoServer(server, basePath);
// sub systems
printers();
schedulerManager();
// start all systems after we are intiallally up and running
setTimeout(() => {
baseSettings();
printers();
schedulerManager();
// start the server up
server.listen(PORT, "0.0.0.0", () =>
log.info(
{ stack: { name: "test" } },
`Server running in ${
process.env.NODE_ENV ? process.env.NODE_ENV : "dev"
}, on http://0.0.0.0:${PORT}${basePath}`
)
);
// start up the v1listener
v1Listener();
}, 5 * 1000);
process.on("uncaughtException", async (err) => {
//console.log("Uncaught Exception:", err);
// await closePool();
// const emailData = {
// email: "blake.matthes@alpla.com", // should be moved to the db so it can be reused.
// subject: `${os.hostname()} has just encountered a crash.`,
// template: "serverCrash",
// context: {
// error: err,
// plant: `${os.hostname()}`,
// },
// };
// start the server up
server.listen(PORT, "0.0.0.0", () =>
log.info(
{ stack: { name: "test" } },
`Server running in ${
process.env.NODE_ENV ? process.env.NODE_ENV : "dev"
}, on http://0.0.0.0:${PORT}${basePath}`,
),
);
if (!process.env.WEBHOOK_URL) {
// await sendEmail(emailData);
} else {
log.fatal({ stack: err.stack }, err.message);
await sendNotify({
module: "system",
subModule: "fatalCrash",
hostname: os.hostname(),
message: err.message,
stack: err?.stack,
});
}
process.on("uncaughtException", async (err) => {
//console.log("Uncaught Exception:", err);
// await closePool();
// const emailData = {
// email: "blake.matthes@alpla.com", // should be moved to the db so it can be reused.
// subject: `${os.hostname()} has just encountered a crash.`,
// template: "serverCrash",
// context: {
// error: err,
// plant: `${os.hostname()}`,
// },
// };
//process.exit(1);
});
if (!process.env.WEBHOOK_URL) {
// await sendEmail(emailData);
} else {
log.fatal({ stack: err.stack }, err.message);
await sendNotify({
module: "system",
subModule: "fatalCrash",
hostname: os.hostname(),
message: err.message,
stack: err?.stack,
});
}
// setInterval(() => {
// const used = process.memoryUsage();
// console.log(
// `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
// used.rss /
// 1024 /
// 1024
// ).toFixed(2)} MB`
// );
// }, 10000);
//process.exit(1);
});
// setInterval(() => {
// const used = process.memoryUsage();
// console.log(
// `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
// used.rss / 1024 / 1024
// ).toFixed(2)} MB`,
// );
// }, 10000);
};
main();

View File

@@ -0,0 +1,22 @@
/**
* This is intended for when running as dev so we can always keep the servers in sync with the main server.
* in the event the server has a change on it we want to make sure we stay in sync
*/
import { createLogger } from "../../../../pkg/logger/logger.js";
/**
 * Dev-only startup helper that keeps a local dev server's server list in
 * sync with the main server (pointed to by MAIN_SERVER). In production, or
 * when MAIN_SERVER is unset, it is a no-op.
 */
export const mainServerSync = async () => {
  const log = createLogger({ module: "admin", subModule: "main server sync" });
  // Sync only applies to dev servers that know where the main server is.
  if (
    process.env.NODE_ENV?.trim() === "production" ||
    !process.env.MAIN_SERVER
  ) {
    // Fixed: the old message claimed the server was "running in production"
    // even when the real reason sync was skipped was a missing MAIN_SERVER
    // on a dev box — that sent debugging in the wrong direction.
    log.info(
      {},
      "Skipping main server sync: running in production or MAIN_SERVER is not set",
    );
    return;
  }
  log.info(
    {},
    "Running in dev and have a main server set we will now pull the servers and look for any changes",
  );
};

View File

@@ -1,6 +1,6 @@
import type { Express, Request, Response } from "express";
import { requireAuth } from "../../pkg/middleware/authMiddleware.js";
import { mainServerSync } from "./controller/servers/matchServers.js";
//admin routes
import users from "./routes/getUserRoles.js";
import grantRoles from "./routes/grantRole.js";
@@ -30,4 +30,9 @@ export const setupAdminRoutes = (app: Express, basePath: string) => {
requireAuth("user", ["systemAdmin", "admin"]), // will pass bc system admin but this is just telling us we need this
revokeRoles,
);
// run the sync only on startup
setTimeout(() => {
mainServerSync();
}, 5 * 1000);
};

View File

@@ -1,100 +1,106 @@
import { Router } from "express";
import type { Request, Response } from "express";
import {
insertServerDataSchema,
serverData,
} from "../../../../pkg/db/schema/servers.js";
import { db } from "../../../../pkg/db/db.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import type { DrizzleError } from "drizzle-orm";
import axios from "axios";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { type DrizzleError, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import {
insertServerDataSchema,
serverData,
} from "../../../../pkg/db/schema/servers.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();
router.post("/", async (req: Request, res: Response) => {
// when a new server is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there
//res.status(200).json({ message: "Server added", ip: req.hostname });
const log = createLogger({ module: "admin", subModule: "add server" });
const parsed = insertServerDataSchema.safeParse(req.body);
// when a new server is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there
//res.status(200).json({ message: "Server added", ip: req.hostname });
const log = createLogger({ module: "admin", subModule: "add server" });
const parsed = insertServerDataSchema.safeParse(req.body);
if (!parsed.success) {
return res.status(400).json({ errors: parsed.error.flatten() });
}
if (!parsed.success) {
return res.status(400).json({ errors: parsed.error.flatten() });
}
const { data, error } = await tryCatch(
db
.insert(serverData)
.values(parsed.data)
//.onConflictDoNothing()
.returning({
name: serverData.name,
plantToken: serverData.plantToken,
})
);
const { data, error } = await tryCatch(
db
.insert(serverData)
.values({
...parsed.data,
add_user: req.user?.username,
add_date: sql`NOW()`,
upd_user: req.user?.username,
upd_date: sql`NOW()`,
})
//.onConflictDoNothing()
.returning({
name: serverData.name,
plantToken: serverData.plantToken,
}),
);
if (error) {
const err: DrizzleError = error;
return res.status(400).json({
message: `Error adding the server`,
error: err.cause,
});
}
if (error) {
const err: DrizzleError = error;
return res.status(400).json({
message: `Error adding the server`,
error: err.cause,
});
}
if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
log.info({}, "Running in dev server about to add in a new server");
const axiosInstance = axios.create({
httpsAgent: new https.Agent({ rejectUnauthorized: false }),
baseURL: process.env.MAIN_SERVER, // e.g. "https://example.com"
withCredentials: true,
});
if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
log.info({}, "Running in dev server about to add in a new server");
const axiosInstance = axios.create({
httpsAgent: new https.Agent({ rejectUnauthorized: false }),
baseURL: process.env.MAIN_SERVER, // e.g. "https://example.com"
withCredentials: true,
});
const loginRes = (await axiosInstance.post(
`${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
{
username: process.env.MAIN_SERVER_USERNAME,
password: process.env.MAIN_SERVER_PASSWORD,
},
{
headers: { "Content-Type": "application/json" },
}
)) as any;
const setCookie = loginRes.headers["set-cookie"][0];
const loginRes = (await axiosInstance.post(
`${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
{
username: process.env.MAIN_SERVER_USERNAME,
password: process.env.MAIN_SERVER_PASSWORD,
},
{
headers: { "Content-Type": "application/json" },
},
)) as any;
const setCookie = loginRes.headers["set-cookie"][0];
if (!setCookie) {
throw new Error("Did not receive a Set-Cookie header from login");
}
if (!setCookie) {
throw new Error("Did not receive a Set-Cookie header from login");
}
const { data, error } = await tryCatch(
axios.post(
`${process.env.MAIN_SERVER}/lst/api/admin/server`,
parsed.data,
{
headers: {
"Content-Type": "application/json",
Cookie: setCookie.split(";")[0],
},
withCredentials: true,
}
)
);
const { data, error } = await tryCatch(
axios.post(
`${process.env.MAIN_SERVER}/lst/api/admin/server`,
parsed.data,
{
headers: {
"Content-Type": "application/json",
Cookie: setCookie.split(";")[0],
},
withCredentials: true,
},
),
);
if (error) {
log.error(
{ stack: error },
"There was an error adding the server to Main Server"
);
}
log.info(
{ stack: data?.data },
"A new Server was just added to the server."
);
}
if (error) {
log.error(
{ stack: error },
"There was an error adding the server to Main Server",
);
}
log.info(
{ stack: data?.data },
"A new Server was just added to the server.",
);
}
return res
.status(201)
.json({ message: `Server ${data[0]?.name} added`, data: data });
return res
.status(201)
.json({ message: `Server ${data[0]?.name} added`, data: data });
});
export default router;

View File

@@ -1,139 +1,141 @@
import { Router } from "express";
import axios from "axios";
import { eq, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { eq } from "drizzle-orm";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import axios from "axios";
import { createLogger } from "../../../../pkg/logger/logger.js";
import https from "https";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();
router.patch("/:token", async (req: Request, res: Response) => {
const log = createLogger({ module: "admin", subModule: "update server" });
const log = createLogger({ module: "admin", subModule: "update server" });
// when a server is updated and is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there, we want to insert with update on conflict.
const token = req.params.token;
const updates: Record<string, any> = {};
// when a server is updated and is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there, we want to insert with update on conflict.
const token = req.params.token;
const updates: Record<string, any> = {};
if (req.body?.name !== undefined) {
updates.name = req.body.name;
}
if (req.body?.serverDNS !== undefined) {
updates.serverDNS = req.body.serverDNS;
}
if (req.body?.ipAddress !== undefined) {
updates.ipAddress = req.body.ipAddress;
}
if (req.body?.name !== undefined) {
updates.name = req.body.name;
}
if (req.body?.serverDNS !== undefined) {
updates.serverDNS = req.body.serverDNS;
}
if (req.body?.ipAddress !== undefined) {
updates.ipAddress = req.body.ipAddress;
}
if (req.body?.greatPlainsPlantCode !== undefined) {
updates.greatPlainsPlantCode = req.body.greatPlainsPlantCode;
}
if (req.body?.greatPlainsPlantCode !== undefined) {
updates.greatPlainsPlantCode = req.body.greatPlainsPlantCode;
}
if (req.body?.lstServerPort !== undefined) {
updates.lstServerPort = req.body.lstServerPort;
}
if (req.body?.lstServerPort !== undefined) {
updates.lstServerPort = req.body.lstServerPort;
}
if (req.body?.serverLoc !== undefined) {
updates.serverLoc = req.body.serverLoc;
}
if (req.body?.serverLoc !== undefined) {
updates.serverLoc = req.body.serverLoc;
}
if (req.body?.streetAddress !== undefined) {
updates.streetAddress = req.body.streetAddress;
}
if (req.body?.streetAddress !== undefined) {
updates.streetAddress = req.body.streetAddress;
}
if (req.body?.cityState !== undefined) {
updates.cityState = req.body.cityState;
}
if (req.body?.cityState !== undefined) {
updates.cityState = req.body.cityState;
}
if (req.body?.zipcode !== undefined) {
updates.zipcode = req.body.zipcode;
}
if (req.body?.zipcode !== undefined) {
updates.zipcode = req.body.zipcode;
}
if (req.body?.contactEmail !== undefined) {
updates.contactEmail = req.body.contactEmail;
}
if (req.body?.contactEmail !== undefined) {
updates.contactEmail = req.body.contactEmail;
}
if (req.body?.contactPhone !== undefined) {
updates.contactPhone = req.body.contactPhone;
}
if (req.body?.contactPhone !== undefined) {
updates.contactPhone = req.body.contactPhone;
}
if (req.body?.customerTiAcc !== undefined) {
updates.customerTiAcc = req.body.customerTiAcc;
}
if (req.body?.customerTiAcc !== undefined) {
updates.customerTiAcc = req.body.customerTiAcc;
}
if (req.body?.active !== undefined) {
updates.active = req.body.active;
}
try {
if (Object.keys(updates).length > 0) {
await db
.update(serverData)
.set(updates)
.where(eq(serverData.plantToken, token));
}
if (req.body?.active !== undefined) {
updates.active = req.body.active;
}
if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
log.info({}, "Running in dev server about to add in a new server");
const axiosInstance = axios.create({
httpsAgent: new https.Agent({ rejectUnauthorized: false }),
baseURL: process.env.MAIN_SERVER,
withCredentials: true,
});
updates.upd_user = req.user!.username || "lst_user";
updates.upd_date = sql`NOW()`;
const loginRes = (await axiosInstance.post(
`${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
{
username: process.env.MAIN_SERVER_USERNAME,
password: process.env.MAIN_SERVER_PASSWORD,
},
{
headers: { "Content-Type": "application/json" },
}
)) as any;
try {
if (Object.keys(updates).length > 0) {
await db
.update(serverData)
.set(updates)
.where(eq(serverData.plantToken, token));
}
const setCookie = loginRes?.headers["set-cookie"][0];
if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
log.info({}, "Running in dev server about to add in a new server");
const axiosInstance = axios.create({
httpsAgent: new https.Agent({ rejectUnauthorized: false }),
baseURL: process.env.MAIN_SERVER,
withCredentials: true,
});
//console.log(setCookie.split(";")[0].replace("__Secure-", ""));
const loginRes = (await axiosInstance.post(
`${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
{
username: process.env.MAIN_SERVER_USERNAME,
password: process.env.MAIN_SERVER_PASSWORD,
},
{
headers: { "Content-Type": "application/json" },
},
)) as any;
if (!setCookie) {
throw new Error(
"Did not receive a Set-Cookie header from login"
);
}
const setCookie = loginRes?.headers["set-cookie"][0];
const { data, error } = await tryCatch(
axios.patch(
`${process.env.MAIN_SERVER}/lst/api/admin/server/${token}`,
updates,
{
headers: {
"Content-Type": "application/json",
Cookie: setCookie.split(";")[0],
},
withCredentials: true,
}
)
);
//console.log(setCookie.split(";")[0].replace("__Secure-", ""));
if (error) {
console.log(error);
log.error(
{ stack: error },
"There was an error adding the server to Main Server"
);
}
log.info(
{ stack: data?.data },
"A new Server was just added to the server."
);
}
res.status(200).json({ message: `${token} Server was just updated` });
} catch (error) {
console.log(error);
res.status(400).json({ message: "Error Server updated", error });
}
if (!setCookie) {
throw new Error("Did not receive a Set-Cookie header from login");
}
const { data, error } = await tryCatch(
axios.patch(
`${process.env.MAIN_SERVER}/lst/api/admin/server/${token}`,
updates,
{
headers: {
"Content-Type": "application/json",
Cookie: setCookie.split(";")[0],
},
withCredentials: true,
},
),
);
if (error) {
console.log(error);
log.error(
{ stack: error },
"There was an error adding the server to Main Server",
);
}
log.info(
{ stack: data?.data },
"A new Server was just added to the server.",
);
}
res.status(200).json({ message: `${token} Server was just updated` });
} catch (error) {
console.log(error);
res.status(400).json({ message: "Error Server updated", error });
}
});
export default router;

View File

@@ -0,0 +1,133 @@
/**
* we want to be able to preprint labels from finished lots.
* we will need a lot number
* machine
* printer
* qty will come over as one by default
* copies will come over as 0 by default
* layout
*/
import { createLogger } from "../../../../pkg/logger/logger.js";
import { delay } from "../../../../pkg/utils/delay.js";
import { prodEndpoint } from "../../../../pkg/utils/prodEndpoint.js";
import type { returnFunc } from "../../../../pkg/utils/return.js";
// Payload for a label preprint request (see the logistics/labeling/preprint
// route, which validates the same shape with zod before calling the
// controller).
export type Preprint = {
  scannerId: number; // scanner issuing the request — TODO confirm semantics
  lotNr: number; // finished lot to reprint labels for
  machineId: number; // NOTE(review): call sites annotate "457=22, 458=23" — confirm mapping
  printerId: number; // NOTE(review): same "457=22, 458=23" mapping — confirm
  layoutId: number; // label layout to render
  numberOfCopies: number; // copies per generated label (0 in the sample request)
  qtyToPrint: number; // how many labels to generate; <=1 still prints one
};
/**
 * Preprint `qtyToPrint` labels for a finished lot by calling the prod
 * GenerateAndPrintLabel endpoint once per label.
 *
 * Returns { success, message, data? } where `data` is the list of serial
 * numbers (parsed from each SSCC) that were printed. Any single failed
 * print aborts the run and returns that failure.
 *
 * Refactor: the >1 and <=1 branches previously duplicated the entire
 * endpoint-call + error-handling body; that now lives in one inner helper.
 * External behavior (including the exact log/return strings of each branch)
 * is unchanged.
 */
export const preprintLabels = async (preprint: Preprint) => {
  const log = createLogger({
    module: "logistics",
    subModule: "preprint",
  });
  const labelsPrinted: number[] = [];

  // Fires one GenerateAndPrintLabel call. On failure returns the error
  // payload to hand straight back to the caller; on success records the
  // serial portion of the SSCC and returns it.
  const printOneLabel = async () => {
    const labels = await prodEndpoint(
      "POST",
      "/public/v1.0/Warehousing/GenerateAndPrintLabel",
      {
        scannerId: preprint.scannerId,
        lotNr: preprint.lotNr,
        machineId: preprint.machineId, // 457=22, 458=23
        printerId: preprint.printerId, // 457=22, 458=23
        layoutId: preprint.layoutId,
        numberOfCopies: preprint.numberOfCopies,
      },
    );
    if (labels?.data.Result === 1) {
      log.error(
        {},
        `There was an error printing the label: ${labels.data.Message}`,
      );
      return { error: { success: false, message: `${labels.data.Message}` } };
    }
    if (!labels?.success) {
      log.error(
        { error: labels?.data },
        `There was an error printing the label`,
      );
      return {
        error: {
          success: false,
          message: `${labels?.message}`,
          data: labels?.data,
        },
      };
    }
    // SSCC slice(10, -1) extracts the serial portion — TODO confirm the
    // SSCC layout this assumes against the prod endpoint's format.
    const serial = parseInt(labels.data.SSCC.slice(10, -1));
    labelsPrinted.push(serial);
    return { serial };
  };

  if (preprint.qtyToPrint > 1) {
    let x = 0;
    do {
      const result = await printOneLabel();
      if ("error" in result) {
        return result.error;
      }
      log.info(
        {},
        `Label just created ${result.serial} and printed, remaining to print ${preprint.qtyToPrint - x}`,
      );
      await delay(250); // pace the printer between labels
      x++;
    } while (x < preprint.qtyToPrint);
    return {
      success: true,
      message: `${preprint.qtyToPrint} were just printed`,
      data: labelsPrinted,
    };
  }

  // qtyToPrint <= 1 (including 0) still prints exactly one label — this
  // matches the original behavior; confirm 0 should not be a no-op.
  const result = await printOneLabel();
  if ("error" in result) {
    return result.error;
  }
  log.info(
    {},
    `Label just created ${result.serial} and printed`,
  );
  return {
    success: true,
    message: `${preprint.qtyToPrint} were just printed.`,
    data: labelsPrinted,
  };
};

View File

@@ -50,7 +50,7 @@ export const schedulerManager = async () => {
//console.log(data);
if (orderData.length === 0) {
log.info({}, "There are no new orders or incoming to be updated");
log.debug({}, "There are no new orders or incoming to be updated");
return;
}

View File

@@ -1,16 +1,17 @@
import type { Express, Request, Response } from "express";
import { requireAuth } from "../../pkg/middleware/authMiddleware.js";
import labeling from "./routes/labeling/labelingRoutes.js";
import schedule from "./routes/scheduler/scheduleRoutes.js";
export const setupLogisticsRoutes = (app: Express, basePath: string) => {
app.use(basePath + "/api/logistics/schedule", schedule);
app.use(basePath + "/api/logistics/schedule", schedule);
app.use(basePath + "/api/logistics/labeling", labeling);
app.use(
basePath + "/api/admin/users",
requireAuth("user", ["systemAdmin"]) // will pass bc system admin but this is just telling us we need this
);
app.use(
basePath + "/api/admin",
requireAuth("user", ["systemAdmin", "admin"]) // will pass bc system admin but this is just telling us we need this
);
// app.use(
// basePath + "/api/admin/users",
// requireAuth("user", ["systemAdmin"]), // will pass bc system admin but this is just telling us we need this
// );
// app.use(
// basePath + "/api/admin",
// requireAuth("user", ["systemAdmin", "admin"]), // will pass bc system admin but this is just telling us we need this
// );
};

View File

@@ -0,0 +1,8 @@
import { Router } from "express";
// NOTE(review): the file name "perprint.js" looks misspelled ("preprint") —
// confirm it matches the actual file on disk before renaming anything here.
import preprint from "./perprint.js";
// Aggregates all labeling sub-routes; currently only the preprint endpoint.
const router = Router();
router.use("/", preprint);
export default router;

View File

@@ -0,0 +1,27 @@
import type { Request, Response } from "express";
import { Router } from "express";
import z from "zod";
import { preprintLabels } from "../../controller/labeling/preprint.js";
/** Zod schema mirroring the controller's Preprint type. */
export const Preprint = z.object({
  scannerId: z.number(),
  lotNr: z.number(),
  machineId: z.number(), // 457=22, 458=23
  printerId: z.number(), // 457=22, 458=23
  layoutId: z.number(),
  numberOfCopies: z.number(),
  qtyToPrint: z.number().default(1), // default only applies via parsed.data
});
const router = Router();
/**
 * POST /preprint — validate the body against the Preprint schema, then hand
 * the validated payload to the preprint controller.
 */
router.post("/preprint", async (req: Request, res: Response) => {
  const parsed = Preprint.safeParse(req.body);
  // Fixed: the parse result was computed but ignored and the raw req.body
  // forwarded, so invalid payloads slipped through unchecked and the
  // qtyToPrint default of 1 was never applied. Mirrors the 400-on-parse-
  // failure pattern used by the admin addServer route.
  if (!parsed.success) {
    return res.status(400).json({ errors: parsed.error.flatten() });
  }
  const print = await preprintLabels(parsed.data);
  res
    .status(200)
    .json({ success: print.success, message: print.message, data: print.data });
});
export default router;

View File

@@ -0,0 +1,37 @@
/**
* will be all the base settings so we dont have to remember to add ever new setting in these will be the defaults
*/
import { readFileSync } from "fs";
import path from "path";
import { fileURLToPath } from "url";
import { db } from "../../../../pkg/db/db.js";
import { settings } from "../../../../pkg/db/schema/settings.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
/**
 * Seeds the settings table with the defaults shipped in ./settings.json.
 * Existing rows are left untouched (onConflictDoNothing); only newly added
 * setting names are logged.
 */
export const baseSettings = async () => {
  const log = createLogger({ module: "system", subModule: "base settings" });
  const settingsPath = path.resolve(__dirname, "./settings.json");

  // Fixed: readFileSync / JSON.parse used to throw straight out of this
  // async function. Callers fire it from a startup setTimeout without
  // awaiting, so a missing or malformed settings.json became an unhandled
  // promise rejection instead of a logged error.
  // JSON.parse yields `any`; drizzle checks the shape at insert time —
  // schema validation here would be a worthwhile follow-up (TODO).
  let newSettings: any;
  try {
    newSettings = JSON.parse(readFileSync(settingsPath, "utf-8"));
  } catch (err) {
    log.error({ err }, `Could not read or parse settings file ${settingsPath}`);
    return;
  }

  const { data, error } = await tryCatch(
    db
      .insert(settings)
      .values(newSettings)
      .onConflictDoNothing()
      .returning({ name: settings.name }),
  );
  if (error) {
    log.error({ error }, "There was an error adding new settings");
  }
  if (data) {
    log.info({ newSettingsAdded: data }, "New settings added");
  }
};

View File

@@ -0,0 +1,16 @@
[
{
"name": "plantToken",
"value": "test3",
"description": "The plant token for the plant IE: test3 or usday1",
"moduleName": "system",
"roles": ["systemAdmin"]
},
{
"name": "dbServer",
"value": "usmcd1vms036",
"description": "What is the db server",
"moduleName": "system",
"roles": ["systemAdmin"]
}
]

View File

@@ -1,6 +1,12 @@
import type { Express, Request, Response } from "express";
import settings from "./routes/settings/settingRoutes.js";
import stats from "./routes/stats.js";
/**
 * Mount the system routers (stats, settings) under the shared base path.
 */
export const setupSystemRoutes = (app: Express, basePath: string) => {
  // The stats router was registered twice on the same path; once is enough.
  app.use(basePath + "/api/system/stats", stats);
  // Auth for settings is applied inside the settings router itself.
  app.use(basePath + "/api/system/settings", settings);
};

View File

@@ -0,0 +1,35 @@
import { Router } from "express";
import type { Request, Response } from "express";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { and, asc, eq } from "drizzle-orm";
const router = Router();

/**
 * GET / — list active servers ordered by name; an optional ?token=<plantToken>
 * query parameter narrows the result to one plant.
 */
router.get("/", async (req: Request, res: Response) => {
  const token = req.query.token;
  // Build the WHERE clauses: token filter (when supplied) plus active-only.
  const filters = [];
  if (token !== undefined) {
    filters.push(eq(serverData.plantToken, `${token}`));
  }
  filters.push(eq(serverData.active, true));
  const { data, error } = await tryCatch(
    db
      .select()
      .from(serverData)
      .where(and(...filters))
      .orderBy(asc(serverData.name))
  );
  if (error) {
    return res.status(400).json({ error: error });
  }
  res.status(200).json({ message: "Current Active server", data: data });
});

export default router;

View File

@@ -0,0 +1,11 @@
import { Router } from "express";
import { requireAuth } from "../../../../pkg/middleware/authMiddleware.js";
import getSettings from "./getSettings.js";
import updateSetting from "./updateSetting.js";
const router = Router();
// Reads are mounted without middleware here; updates require a system-module
// admin role (systemAdmin holders bypass the check inside requireAuth).
router.use("/", getSettings);
router.use("/", requireAuth("system", ["systemAdmin", "admin"]), updateSetting);
export default router;

View File

@@ -0,0 +1,141 @@
import axios from "axios";
import { eq, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();

// Columns a caller may update through this endpoint (whitelist — anything
// else in the body is ignored).
const UPDATABLE_FIELDS = [
  "name",
  "serverDNS",
  "ipAddress",
  "greatPlainsPlantCode",
  "lstServerPort",
  "serverLoc",
  "streetAddress",
  "cityState",
  "zipcode",
  "contactEmail",
  "contactPhone",
  "customerTiAcc",
  "active",
] as const;

/**
 * PATCH /:token — update the server row identified by its plant token.
 * When running on localhost with MAIN_SERVER configured, the same update is
 * forwarded to the main server so dev changes show up there too.
 */
router.patch("/:token", async (req: Request, res: Response) => {
  const log = createLogger({ module: "admin", subModule: "update server" });
  const token = req.params.token;

  // Copy only whitelisted fields that were explicitly provided.
  const updates: Record<string, any> = {};
  for (const field of UPDATABLE_FIELDS) {
    if (req.body?.[field] !== undefined) {
      updates[field] = req.body[field];
    }
  }
  // requireAuth should have populated req.user; fall back to the service user
  // instead of crashing with a non-null assertion when it hasn't.
  updates.upd_user = req.user?.username || "lst_user";
  updates.upd_date = sql`NOW()`;

  try {
    if (Object.keys(updates).length > 0) {
      await db
        .update(serverData)
        .set(updates)
        .where(eq(serverData.plantToken, token));
    }

    // Mirror the update to the main server when developing locally.
    if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
      log.info({}, "Running in dev server about to add in a new server");
      const axiosInstance = axios.create({
        // NOTE(review): disables TLS verification — presumably for internal
        // self-signed certs; confirm this is intended.
        httpsAgent: new https.Agent({ rejectUnauthorized: false }),
        baseURL: process.env.MAIN_SERVER,
        withCredentials: true,
      });
      const loginRes = (await axiosInstance.post(
        `${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
        {
          username: process.env.MAIN_SERVER_USERNAME,
          password: process.env.MAIN_SERVER_PASSWORD,
        },
        {
          headers: { "Content-Type": "application/json" },
        },
      )) as any;
      // Guard BEFORE indexing: the original indexed ["set-cookie"][0] first
      // and threw a TypeError when no Set-Cookie header came back, so the
      // explicit error below was unreachable.
      const setCookie = loginRes?.headers?.["set-cookie"]?.[0];
      if (!setCookie) {
        throw new Error("Did not receive a Set-Cookie header from login");
      }
      const { data, error } = await tryCatch(
        axios.patch(
          `${process.env.MAIN_SERVER}/lst/api/admin/server/${token}`,
          updates,
          {
            headers: {
              "Content-Type": "application/json",
              Cookie: setCookie.split(";")[0],
            },
            withCredentials: true,
          },
        ),
      );
      if (error) {
        log.error(
          { stack: error },
          "There was an error adding the server to Main Server",
        );
      } else {
        // Only log success when the forward actually succeeded (the original
        // logged "added" even after an error).
        log.info(
          { stack: data?.data },
          "A new Server was just added to the server.",
        );
      }
    }
    res.status(200).json({ message: `${token} Server was just updated` });
  } catch (error) {
    log.error({ error }, "Error Server updated");
    res.status(400).json({ message: "Error Server updated", error });
  }
});

export default router;

View File

@@ -1,36 +1,40 @@
import { format } from "date-fns-tz";
import { eq } from "drizzle-orm";
import { Router } from "express";
import { tryCatch } from "../../../pkg/utils/tryCatch.js";
import { db } from "../../../pkg/db/db.js";
import {
serverStats,
type ServerStats,
type ServerStats,
serverStats,
} from "../../../pkg/db/schema/serverstats.js";
import { eq } from "drizzle-orm";
import { format } from "date-fns-tz";
import { tryCatch } from "../../../pkg/utils/tryCatch.js";
import { checkBuildUpdate } from "../utlis/checkForBuild.js";
const router = Router();
// GET /health
router.get("/", async (req, res) => {
const { data, error } = await tryCatch(
db.select().from(serverStats).where(eq(serverStats.id, "serverStats"))
);
const { data, error } = await tryCatch(
db.select().from(serverStats).where(eq(serverStats.id, "serverStats")),
);
if (error || !data) {
res.status(400).json({ error: error });
}
if (error || !data) {
res.status(400).json({ error: error });
}
const statData = data as ServerStats[];
res.json({
status: "ok",
uptime: process.uptime(),
build: statData[0]?.build,
pendingUpdateFile: await checkBuildUpdate(["."]),
lastUpdate: statData[0]?.lastUpdate
? format(statData[0].lastUpdate, "MM/dd/yyyy HH:mm")
: "",
});
const statData = data as ServerStats[];
const used = process.memoryUsage();
res.json({
status: "ok",
uptime: process.uptime(),
build: statData[0]?.build,
pendingUpdateFile: await checkBuildUpdate(["."]),
lastUpdate: statData[0]?.lastUpdate
? format(statData[0].lastUpdate, "MM/dd/yyyy HH:mm")
: "",
memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
used.rss / 1024 / 1024
).toFixed(2)} MB`,
});
});
export default router;

View File

@@ -0,0 +1,27 @@
import {
jsonb,
pgTable,
text,
timestamp,
uniqueIndex,
uuid,
} from "drizzle-orm/pg-core";
// Permission definitions for prod endpoints: each row names a permission and
// the roles granted it (current plus legacy — presumably carried over from
// the v1 app; confirm).
export const prodPermissions = pgTable(
  "prodPermissions",
  {
    // surrogate key
    prodPerm_id: uuid("prodPerm_id").defaultRandom().primaryKey(),
    name: text("name").notNull(),
    description: text("description").notNull(),
    roles: jsonb("roles").default([]),
    rolesLegacy: jsonb("rolesLegacy").default([]),
    // audit columns
    add_User: text("add_User").default("LST_System").notNull(),
    add_Date: timestamp("add_Date").defaultNow(),
    // NOTE(review): column casing is inconsistent ("upd_User" vs "upd_date"
    // vs the TS key upd_user). It matches the generated migration, so do not
    // change it without a new migration.
    upd_user: text("upd_User").default("LST_System").notNull(),
    upd_date: timestamp("upd_date").defaultNow(),
  },
  (table) => [
    // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
    // permission names must be unique
    uniqueIndex("prodPermName").on(table.name),
  ],
);

View File

@@ -1,49 +1,53 @@
import {
boolean,
integer,
pgTable,
text,
timestamp,
uniqueIndex,
uuid,
boolean,
integer,
pgTable,
text,
timestamp,
uniqueIndex,
uuid,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import z from "zod";
export const serverData = pgTable(
"serverData",
{
server_id: uuid("server_id").defaultRandom().primaryKey(),
name: text("name").notNull(),
serverDNS: text("serverDNS").notNull(),
plantToken: text("plantToken").notNull(),
ipAddress: text("ipAddress").notNull(),
greatPlainsPlantCode: integer("greatPlainsPlantCode").notNull(),
streetAddress: text("streetAddress"),
cityState: text("cityState"),
zipcode: integer("zipcode"),
contactEmail: text("contactEmail"),
contactPhone: text("contactPhone"),
customerTiAcc: text("customerTiAcc"),
lstServerPort: integer("lstServerPort").notNull(),
active: boolean("active").default(true),
serverLoc: text("serverLoc").notNull(),
lastUpdated: timestamp("lastUpdated").defaultNow(),
isUpgrading: boolean("isUpgrading").default(false),
},
"serverData",
{
server_id: uuid("server_id").defaultRandom().primaryKey(),
name: text("name").notNull(),
serverDNS: text("serverDNS").notNull(),
plantToken: text("plantToken").notNull(),
ipAddress: text("ipAddress").notNull(),
greatPlainsPlantCode: integer("greatPlainsPlantCode").notNull(),
streetAddress: text("streetAddress"),
cityState: text("cityState"),
zipcode: integer("zipcode"),
contactEmail: text("contactEmail"),
contactPhone: text("contactPhone"),
customerTiAcc: text("customerTiAcc"),
lstServerPort: integer("lstServerPort").notNull(),
active: boolean("active").default(true),
serverLoc: text("serverLoc").notNull(),
lastUpdated: timestamp("lastUpdated").defaultNow(),
isUpgrading: boolean("isUpgrading").default(false),
add_user: text("add_user").default("lst_user"),
add_date: timestamp("add_date").defaultNow(),
upd_user: text("upd_user").default("lst_user"),
upd_date: timestamp("upd_date").defaultNow(),
},
(table) => [
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
uniqueIndex("plantToken").on(table.plantToken),
]
(table) => [
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
uniqueIndex("plantToken").on(table.plantToken),
],
);
export const selectServerDataSchema = createSelectSchema(serverData);
export const insertServerDataSchema = createInsertSchema(serverData).extend({
contactEmail: z.email().optional(),
// zipcode: z
// .string()
// .regex(/^\d{5}$/)
// .optional(),
contactEmail: z.email().optional(),
// zipcode: z
// .string()
// .regex(/^\d{5}$/)
// .optional(),
});

View File

@@ -0,0 +1,70 @@
import { Client } from "pg";
import { createLogger } from "./logger.js";
// Shape of a v1 log row as delivered by pg_notify (see the logs trigger,
// which sends row_to_json(NEW)).
type NewLog = {
  level: string;
  username: string;
  service: string;
  message: string;
  checked: boolean;
  add_Date: Date;
};

/**
 * Connect to the v1 database and listen on its NOTIFY channels, replaying
 * v1 log events through the v2 structured logger. User and order events are
 * currently just echoed to the console.
 */
export const v1Listener = async () => {
  const log = createLogger({ module: "logger", subModule: "Old logging app" });
  const client = new Client({
    connectionString: process.env.DATABASE_URL_V1,
  });
  await client.connect();
  // the notify channels to listen for events on (fixed, trusted names)
  const channels = ["logs_channel", "users_channel", "orders_channel"];
  for (const ch of channels) {
    await client.query(`LISTEN ${ch}`);
  }
  console.log("Listening for:", channels.join(", "));

  // Re-emit a v1 log payload through the v2 logger.
  const logEvent = (newLog: string) => {
    let newLogEvent: NewLog;
    try {
      // Guard the parse: a malformed payload previously threw inside the
      // notification handler and took the listener down.
      newLogEvent = JSON.parse(newLog);
    } catch (err) {
      log.error({ err, payload: newLog }, "Could not parse v1 log payload");
      return;
    }
    const meta = {
      username: newLogEvent.username,
      service: newLogEvent.service,
    };
    // "error" maps to error; "info" and anything unknown fall back to info
    // (the original's default arm duplicated the info arm).
    if (newLogEvent.level === "error") {
      log.error(meta, newLogEvent.message);
    } else {
      log.info(meta, newLogEvent.message);
    }
  };

  client.on("notification", (msg) => {
    // msg.channel tells which channel it came from
    // msg.payload is whatever message you sent from the trigger
    switch (msg.channel) {
      case "logs_channel":
        logEvent(msg.payload || "");
        break;
      case "users_channel":
        console.log("👤 User event:", msg.payload);
        break;
      case "orders_channel":
        console.log("🛒 Order event:", msg.payload);
        break;
      default:
        console.log("Other event:", msg);
    }
  });
};

View File

@@ -1,90 +1,92 @@
import type { Request, Response, NextFunction } from "express";
import { auth } from "../auth/auth.js";
import { userRoles, type UserRole } from "../db/schema/user_roles.js";
import { db } from "../db/db.js";
import { eq } from "drizzle-orm";
import type { NextFunction, Request, Response } from "express";
import { auth } from "../auth/auth.js";
import { db } from "../db/db.js";
import { type UserRole, userRoles } from "../db/schema/user_roles.js";
// Augment Express's Request with the authenticated user set by requireAuth.
// (The diff render showed old and new declarations interleaved; this is the
// final version, which adds `username`.)
declare global {
  namespace Express {
    interface Request {
      // Absent until requireAuth has run for the request.
      user?: {
        id: string;
        email?: string;
        // roles keyed by module name, e.g. { system: ["systemAdmin"] }
        roles: Record<string, string[]>;
        username?: string | null;
      };
    }
  }
}
/**
 * Convert Node's incoming header object into a WHATWG Headers instance
 * (auth.api.getSession expects web-standard Headers). Array-valued headers
 * are appended individually; undefined values are skipped.
 * (Deduplicated — the diff render showed the body twice.)
 */
function toWebHeaders(nodeHeaders: Request["headers"]): Headers {
  const h = new Headers();
  for (const [key, value] of Object.entries(nodeHeaders)) {
    if (Array.isArray(value)) {
      value.forEach((v) => h.append(key, v));
    } else if (value !== undefined) {
      h.set(key, value);
    }
  }
  return h;
}
/**
 * Express middleware factory: verifies the session and, optionally, that the
 * user holds one of `requiredRoles` within `moduleName`. A "systemAdmin" role
 * in any module bypasses the role check. On success attaches `req.user`
 * (id, email, username, roles-by-module) and calls next().
 * (Deduplicated to the final version from the interleaved diff render.)
 */
export const requireAuth = (moduleName?: string, requiredRoles?: string[]) => {
  return async (req: Request, res: Response, next: NextFunction) => {
    try {
      const headers = toWebHeaders(req.headers);

      // Get session; disable the cookie cache so it is re-validated each call
      const session = await auth.api.getSession({
        headers,
        query: { disableCookieCache: true },
      });

      if (!session) {
        return res.status(401).json({ error: "No active session" });
      }

      const userId = session.user.id;

      // Get roles
      const roles = await db
        .select()
        .from(userRoles)
        .where(eq(userRoles.userId, userId));

      // Organize roles by module
      const rolesByModule: Record<string, string[]> = {};
      for (const r of roles) {
        if (!rolesByModule[r.module]) rolesByModule[r.module] = [];
        rolesByModule[r.module].push(r.role);
      }

      req.user = {
        id: userId,
        email: session.user.email,
        roles: rolesByModule,
        username: session.user.username,
      };

      // SystemAdmin override
      const hasSystemAdmin = Object.values(rolesByModule)
        .flat()
        .includes("systemAdmin");

      // Role check (skip if systemAdmin). With no moduleName, any module's
      // roles may satisfy the requirement.
      if (requiredRoles?.length && !hasSystemAdmin) {
        const moduleRoles = moduleName
          ? (rolesByModule[moduleName] ?? [])
          : Object.values(rolesByModule).flat();
        const hasAccess = moduleRoles.some((role) =>
          requiredRoles.includes(role),
        );
        if (!hasAccess) {
          return res.status(403).json({ error: "Forbidden" });
        }
      }

      next();
    } catch (err) {
      console.error("Auth middleware error:", err);
      res.status(500).json({ error: "Auth check failed" });
    }
  };
};

View File

@@ -0,0 +1,8 @@
import axios from "axios";
import https from "https";
// Shared axios client for calls to the main server (MAIN_SERVER env var).
// NOTE(review): rejectUnauthorized: false disables TLS certificate
// verification — presumably for internal/self-signed certs; confirm intended.
export const axiosInstance = axios.create({
  httpsAgent: new https.Agent({ rejectUnauthorized: false }),
  baseURL: process.env.MAIN_SERVER,
  withCredentials: true,
});

View File

@@ -0,0 +1,3 @@
/** Resolve after `ms` milliseconds — awaitable pause. */
export function delay(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });
}

View File

@@ -0,0 +1,90 @@
/**
* This will fire the endpoints
* we only need the endpoint url grabbed from swagger
* and the data to be passed over
*/
import axios, { type Method } from "axios";
import { eq } from "drizzle-orm";
import https from "https";
import { db } from "../db/db.js";
import { settings } from "../db/schema/settings.js";
import { createLogger } from "../logger/logger.js";
import { tryCatch } from "./tryCatch.js";
// create the test server stuff
// Known test-server tokens and the port each listens on.
const testServers = [
  { token: "test1", port: 8940 },
  { token: "test2", port: 8941 },
  { token: "test3", port: 8942 },
];

/**
 * Fire a prod endpoint. Only the endpoint path (grabbed from swagger) and an
 * optional payload are needed; the host is derived from the plantToken and
 * dbServer settings (test tokens route to the shared test box on their port).
 * Always returns { success, message, data } — errors are reported, not thrown.
 */
export const prodEndpoint = async <T>(
  method: Method,
  endpoint: string,
  data?: T,
) => {
  const log = createLogger({ module: "pkg", subModule: "prodEndpoints" });
  // example url "https://usmcd1vms036.alpla.net:8942/application/public/v1.0/DemandManagement/ORDERS"

  // The two settings lookups are independent — run them in parallel. Each
  // select returns an array, since more than one row could match the filter.
  const [plantToken, server] = await Promise.all([
    db.select().from(settings).where(eq(settings.name, "plantToken")),
    db.select().from(settings).where(eq(settings.name, "dbServer")),
  ]);

  // find() replaces the original some()+filter pair (one pass, same result)
  const test = testServers.find((t) => t.token === plantToken[0]?.value);
  const url = test
    ? `https://${server[0]?.value}.alpla.net:${test.port}/application${endpoint}`
    : `https://${plantToken[0]?.value}prod.alpla.net/application${endpoint}`;

  // axios client with the TEC api key; internal certs are not verified
  const axiosInstance = axios.create({
    httpsAgent: new https.Agent({ rejectUnauthorized: false }),
    withCredentials: true,
    headers: {
      "X-API-Key": process.env.TEC_API_KEY || "",
      "Content-Type": "application/json",
    },
  });

  const { data: api, error: apiError } = (await tryCatch(
    axiosInstance({
      method,
      url: url,
      ...(data && { data }),
    }),
  )) as any;

  if (apiError) {
    log.error(
      { error: apiError?.response?.data },
      "There was an error running the endpoint",
    );
    return {
      success: false,
      message: "There was an error processing the endpoint",
      // optional chaining: non-HTTP failures (e.g. DNS) have no .response
      data: apiError?.response?.data,
    };
  }
  // tryCatch yields exactly one of data/error, so api is set here; the
  // original could fall through and return undefined implicitly.
  return {
    success: true,
    message: "Prod endpoint processed",
    data: api.data,
  };
};

View File

@@ -1,56 +1,55 @@
import * as React from "react"
import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area"
import { cn } from "@/lib/utils"
import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area";
import * as React from "react";
import { cn } from "../../lib/utils";
// Styled wrapper around the Radix ScrollArea root: focusable viewport plus a
// default vertical ScrollBar and corner. (Deduplicated from the interleaved
// diff render — this is the final, formatted version.)
function ScrollArea({
  className,
  children,
  ...props
}: React.ComponentProps<typeof ScrollAreaPrimitive.Root>) {
  return (
    <ScrollAreaPrimitive.Root
      data-slot="scroll-area"
      className={cn("relative", className)}
      {...props}
    >
      <ScrollAreaPrimitive.Viewport
        data-slot="scroll-area-viewport"
        className="focus-visible:ring-ring/50 size-full rounded-[inherit] transition-[color,box-shadow] outline-none focus-visible:ring-[3px] focus-visible:outline-1"
      >
        {children}
      </ScrollAreaPrimitive.Viewport>
      <ScrollBar />
      <ScrollAreaPrimitive.Corner />
    </ScrollAreaPrimitive.Root>
  );
}
// Scrollbar track + thumb for ScrollArea; vertical by default.
// (Deduplicated from the interleaved diff render.)
function ScrollBar({
  className,
  orientation = "vertical",
  ...props
}: React.ComponentProps<typeof ScrollAreaPrimitive.ScrollAreaScrollbar>) {
  return (
    <ScrollAreaPrimitive.ScrollAreaScrollbar
      data-slot="scroll-area-scrollbar"
      orientation={orientation}
      className={cn(
        "flex touch-none p-px transition-colors select-none",
        orientation === "vertical" &&
          "h-full w-2.5 border-l border-l-transparent",
        orientation === "horizontal" &&
          "h-2.5 flex-col border-t border-t-transparent",
        className,
      )}
      {...props}
    >
      <ScrollAreaPrimitive.ScrollAreaThumb
        data-slot="scroll-area-thumb"
        className="bg-border relative flex-1 rounded-full"
      />
    </ScrollAreaPrimitive.ScrollAreaScrollbar>
  );
}

export { ScrollArea, ScrollBar };

View File

@@ -1,114 +1,113 @@
import * as React from "react"
import { cn } from "@/lib/utils"
import * as React from "react";
import { cn } from "../../lib/utils";
// Table root wrapped in a horizontally-scrollable container.
// (Deduplicated from the interleaved diff render.)
function Table({ className, ...props }: React.ComponentProps<"table">) {
  return (
    <div
      data-slot="table-container"
      className="relative w-full overflow-x-auto"
    >
      <table
        data-slot="table"
        className={cn("w-full caption-bottom text-sm", className)}
        {...props}
      />
    </div>
  );
}
// thead with a bottom border on each row. (Deduplicated diff render.)
function TableHeader({ className, ...props }: React.ComponentProps<"thead">) {
  return (
    <thead
      data-slot="table-header"
      className={cn("[&_tr]:border-b", className)}
      {...props}
    />
  );
}
// tbody with the last row's border removed. (Deduplicated diff render.)
function TableBody({ className, ...props }: React.ComponentProps<"tbody">) {
  return (
    <tbody
      data-slot="table-body"
      className={cn("[&_tr:last-child]:border-0", className)}
      {...props}
    />
  );
}
// tfoot with muted background and top border. (Deduplicated diff render.)
function TableFooter({ className, ...props }: React.ComponentProps<"tfoot">) {
  return (
    <tfoot
      data-slot="table-footer"
      className={cn(
        "bg-muted/50 border-t font-medium [&>tr]:last:border-b-0",
        className,
      )}
      {...props}
    />
  );
}
// tr with hover and selected-state styling. (Deduplicated diff render.)
function TableRow({ className, ...props }: React.ComponentProps<"tr">) {
  return (
    <tr
      data-slot="table-row"
      className={cn(
        "hover:bg-muted/50 data-[state=selected]:bg-muted border-b transition-colors",
        className,
      )}
      {...props}
    />
  );
}
// th cell with checkbox-aware padding. (Deduplicated diff render.)
function TableHead({ className, ...props }: React.ComponentProps<"th">) {
  return (
    <th
      data-slot="table-head"
      className={cn(
        "text-foreground h-10 px-2 text-left align-middle font-medium whitespace-nowrap [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]",
        className,
      )}
      {...props}
    />
  );
}
// td cell with checkbox-aware padding. (Deduplicated diff render.)
function TableCell({ className, ...props }: React.ComponentProps<"td">) {
  return (
    <td
      data-slot="table-cell"
      className={cn(
        "p-2 align-middle whitespace-nowrap [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]",
        className,
      )}
      {...props}
    />
  );
}
// caption styled as a muted footnote. (Deduplicated diff render, including
// the duplicated export statement below.)
function TableCaption({
  className,
  ...props
}: React.ComponentProps<"caption">) {
  return (
    <caption
      data-slot="table-caption"
      className={cn("text-muted-foreground mt-4 text-sm", className)}
      {...props}
    />
  );
}

export {
  Table,
  TableHeader,
  TableBody,
  TableFooter,
  TableHead,
  TableRow,
  TableCell,
  TableCaption,
};

View File

@@ -10,7 +10,7 @@ interface ShipmentItemProps {
export function ShipmentItem({
shipment,
index = 0,
perm = true,
//perm = true,
}: ShipmentItemProps) {
const { setNodeRef, listeners, attributes, transform } = useDraggable({
id: shipment.orderNumber,

View File

@@ -1,9 +1,7 @@
import { createFileRoute } from "@tanstack/react-router";
import { useEffect, useState } from "react";
import { coreSocket } from "../../../lib/socket.io/socket";
import "../-components/style.css";
import moment from "moment";
import Timeline from "react-calendar-timeline";
export const Route = createFileRoute("/(logistics)/logistics/deliverySchedule")(
{
@@ -19,9 +17,9 @@ export const Route = createFileRoute("/(logistics)/logistics/deliverySchedule")(
function RouteComponent() {
// connect to the channel
const [shipments, setShipments] = useState([]) as any;
//const [shipments, setShipments] = useState([]) as any;
//const [perm] = useState(true); // will check this for sure with a user permissions
const [loaded, setLoaded] = useState(false);
//const [loaded, setLoaded] = useState(false);
// useEffect(() => {
// const handleConnect = () => {

View File

@@ -11,7 +11,7 @@ import { useEffect } from "react";
import { Toaster } from "sonner";
import Nav from "../components/navBar/Nav";
import SideBarNav from "../components/navBar/SideBarNav";
import { SidebarProvider, SidebarTrigger } from "../components/ui/sidebar";
import { SidebarProvider } from "../components/ui/sidebar";
import { userAccess } from "../lib/authClient";
import { SessionGuard } from "../lib/providers/SessionProvider";
import { ThemeProvider } from "../lib/providers/theme-provider";

View File

@@ -91,6 +91,10 @@ function RouteComponent() {
);
},
}),
// password reset will do the email flow
// change password an input for this one so well need inline editing for this dope one
// trash can to delete user
// last login -- need to get working on the server side as well.
columnHelper.accessor("roles", {
header: () => <span>Roles</span>,
cell: ({ row }) => {

View File

@@ -0,0 +1,12 @@
-- Publish every inserted logs row as JSON on the logs_channel NOTIFY
-- channel so external listeners (e.g. the v2 v1Listener) can replay it.
CREATE OR REPLACE FUNCTION notify_new_log()
RETURNS trigger AS $$
BEGIN
PERFORM pg_notify('logs_channel', row_to_json(NEW)::text);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Fire the notify function after every insert into logs.
CREATE TRIGGER logs_notify_trigger
AFTER INSERT ON logs
FOR EACH ROW
EXECUTE FUNCTION notify_new_log();

View File

@@ -1,97 +1,97 @@
{
"name": "lstv2",
"version": "2.27.0",
"type": "module",
"scripts": {
"dev": "concurrently -n \"server,frontend\" -c \"#007755,#2f6da3\" \"npm run dev:server\" \"cd frontend && npm run dev\"",
"dev:server": "dotenvx run -f .env -- tsx watch server/index.ts",
"dev:frontend": "cd frontend && npm run dev",
"dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts",
"dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts",
"build": "npm run build:server && npm run build:frontend",
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y",
"build:frontend": "cd frontend && npm run build",
"build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild",
"copy:scripts": "tsx server/scripts/copyScripts.ts",
"copy:servers": "xcopy server\\services\\server\\utils\\serverData.json dist\\server\\services\\server\\utils /E /I /Y",
"start": "set NODE_ENV=production && npm run start:server",
"start:server": "dotenvx run -f .env -- node dist/server/index.js",
"db:generate": "npx drizzle-kit generate",
"db:migrate": "npx drizzle-kit push",
"db:dev": "npm run build && npm run db:generate && npm run db:migrate",
"deploy": "standard-version --conventional-commits && npm run build",
"zipServer": "dotenvx run -f .env -- tsx server/scripts/zipUpBuild.ts \"C:\\Users\\matthes01\\Documents\\lstv2\"",
"newBuild": "npm run build:server && npm run build:frontend",
"copyToNew": "powershell -ExecutionPolicy Bypass -File server/scripts/copyToLst.ps1 -dir \"C:\\Users\\matthes01\\Documents\\lstv2\"",
"removeOld": "rimraf dist && rimraf frontend/dist",
"prodBuild": "npm run v1Build && npm run build && npm run zipServer && npm run dev",
"commit": "cz",
"prodinstall": "npm i --omit=dev && npm run db:migrate",
"checkupdates": "npx npm-check-updates",
"testingCode": "dotenvx run -f .env -- tsx watch server/services/logistics/controller/warehouse/cycleCountChecks/cyclecountCheck.ts"
},
"config": {
"commitizen": {
"path": "./node_modules/cz-conventional-changelog"
}
},
"admConfig": {
"build": 661,
"oldBuild": "backend-0.1.3.zip"
},
"devDependencies": {
"@types/adm-zip": "^0.5.7",
"@types/bcrypt": "^5.0.2",
"@types/fs-extra": "^11.0.4",
"@types/js-cookie": "^3.0.6",
"@types/mssql": "^9.1.7",
"@types/node": "^24.0.3",
"@types/node-cron": "^3.0.11",
"@types/nodemailer": "^6.4.17",
"@types/pg": "^8.15.4",
"@types/ws": "^8.18.1",
"concurrently": "^9.1.2",
"cz-conventional-changelog": "^3.3.0",
"standard-version": "^9.5.0",
"tsx": "^4.20.3",
"typescript": "^5.8.3"
},
"dependencies": {
"@dotenvx/dotenvx": "^1.45.1",
"@hono/node-server": "^1.14.4",
"@hono/zod-openapi": "^0.19.8",
"@scalar/hono-api-reference": "^0.9.5",
"@tanstack/react-form": "^1.12.3",
"@tanstack/react-table": "^8.21.3",
"@types/jsonwebtoken": "^9.0.10",
"@types/nodemailer-express-handlebars": "^4.0.5",
"adm-zip": "^0.5.16",
"axios": "^1.10.0",
"bcryptjs": "^3.0.2",
"croner": "^9.1.0",
"date-fns": "^4.1.0",
"date-fns-tz": "^3.2.0",
"drizzle-kit": "^0.31.1",
"drizzle-orm": "^0.44.2",
"drizzle-zod": "^0.8.2",
"excel-date-to-js": "^1.1.5",
"fast-xml-parser": "^5.2.5",
"fs-extra": "^11.3.0",
"jsonwebtoken": "^9.0.2",
"morgan": "^1.10.1",
"mssql": "^11.0.1",
"nodemailer": "^7.0.3",
"nodemailer-express-handlebars": "^7.0.0",
"pg": "^8.16.2",
"pino": "^9.7.0",
"pino-abstract-transport": "^2.0.0",
"pino-pretty": "^13.0.0",
"postgres": "^3.4.7",
"react-resizable-panels": "^3.0.3",
"rimraf": "^6.0.1",
"st-ethernet-ip": "^2.7.5",
"ws": "^8.18.2",
"xlsx": "^0.18.5",
"zod": "^3.25.67"
}
"name": "lstv2",
"version": "2.27.0",
"type": "module",
"scripts": {
"dev": "concurrently -n \"server,frontend\" -c \"#007755,#2f6da3\" \"npm run dev:server\" \"cd frontend && npm run dev\"",
"dev:server": "dotenvx run -f .env -- tsx watch server/index.ts",
"dev:frontend": "cd frontend && npm run dev",
"dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts",
"dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts",
"build": "npm run build:server && npm run build:frontend",
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y ",
"build:frontend": "cd frontend && npm run build",
"build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild",
"copy:scripts": "tsx server/scripts/copyScripts.ts",
"copy:servers": "xcopy server\\services\\server\\utils\\serverData.json dist\\server\\services\\server\\utils /E /I /Y",
"start": "set NODE_ENV=production && npm run start:server",
"start:server": "dotenvx run -f .env -- node dist/server/index.js",
"db:generate": "npx drizzle-kit generate",
"db:migrate": "npx drizzle-kit push",
"db:dev": "npm run build && npm run db:generate && npm run db:migrate",
"deploy": "standard-version --conventional-commits && npm run build",
"zipServer": "dotenvx run -f .env -- tsx server/scripts/zipUpBuild.ts \"C:\\Users\\matthes01\\Documents\\lstv2\"",
"newBuild": "npm run build:server && npm run build:frontend",
"copyToNew": "powershell -ExecutionPolicy Bypass -File server/scripts/copyToLst.ps1 -dir \"C:\\Users\\matthes01\\Documents\\lstv2\"",
"removeOld": "rimraf dist && rimraf frontend/dist",
"prodBuild": "npm run v1Build && npm run build && npm run zipServer && npm run dev",
"commit": "cz",
"prodinstall": "npm i --omit=dev && npm run db:migrate",
"checkupdates": "npx npm-check-updates",
"testingCode": "dotenvx run -f .env -- tsx watch server/services/logistics/controller/warehouse/cycleCountChecks/cyclecountCheck.ts"
},
"config": {
"commitizen": {
"path": "./node_modules/cz-conventional-changelog"
}
},
"admConfig": {
"build": 661,
"oldBuild": "backend-0.1.3.zip"
},
"devDependencies": {
"@types/adm-zip": "^0.5.7",
"@types/bcrypt": "^5.0.2",
"@types/fs-extra": "^11.0.4",
"@types/js-cookie": "^3.0.6",
"@types/mssql": "^9.1.7",
"@types/node": "^24.0.3",
"@types/node-cron": "^3.0.11",
"@types/nodemailer": "^6.4.17",
"@types/pg": "^8.15.4",
"@types/ws": "^8.18.1",
"concurrently": "^9.1.2",
"cz-conventional-changelog": "^3.3.0",
"standard-version": "^9.5.0",
"tsx": "^4.20.3",
"typescript": "^5.8.3"
},
"dependencies": {
"@dotenvx/dotenvx": "^1.45.1",
"@hono/node-server": "^1.14.4",
"@hono/zod-openapi": "^0.19.8",
"@scalar/hono-api-reference": "^0.9.5",
"@tanstack/react-form": "^1.12.3",
"@tanstack/react-table": "^8.21.3",
"@types/jsonwebtoken": "^9.0.10",
"@types/nodemailer-express-handlebars": "^4.0.5",
"adm-zip": "^0.5.16",
"axios": "^1.10.0",
"bcryptjs": "^3.0.2",
"croner": "^9.1.0",
"date-fns": "^4.1.0",
"date-fns-tz": "^3.2.0",
"drizzle-kit": "^0.31.1",
"drizzle-orm": "^0.44.2",
"drizzle-zod": "^0.8.2",
"excel-date-to-js": "^1.1.5",
"fast-xml-parser": "^5.2.5",
"fs-extra": "^11.3.0",
"jsonwebtoken": "^9.0.2",
"morgan": "^1.10.1",
"mssql": "^11.0.1",
"nodemailer": "^7.0.3",
"nodemailer-express-handlebars": "^7.0.0",
"pg": "^8.16.2",
"pino": "^9.7.0",
"pino-abstract-transport": "^2.0.0",
"pino-pretty": "^13.0.0",
"postgres": "^3.4.7",
"react-resizable-panels": "^3.0.3",
"rimraf": "^6.0.1",
"st-ethernet-ip": "^2.7.5",
"ws": "^8.18.2",
"xlsx": "^0.18.5",
"zod": "^3.25.67"
}
}

View File

@@ -0,0 +1,17 @@
-- Drizzle-generated migration (presumably tag 0019_bizarre_tarot per the
-- journal entry elsewhere in this changeset — verify before renaming).
-- Do not edit applied statements; the "--> statement-breakpoint" markers are
-- how drizzle-kit splits this file and must stay intact.
--
-- Creates the "prodPermissions" permission-definition table and retrofits
-- audit columns onto the existing "serverData" table.
CREATE TABLE "prodPermissions" (
"prodPerm_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"name" text NOT NULL, -- unique per "prodPermName" index created below
"description" text NOT NULL,
"roles" jsonb DEFAULT '[]'::jsonb, -- defaults to an empty JSON array
"rolesLegacy" jsonb DEFAULT '[]'::jsonb, -- defaults to an empty JSON array
"add_User" text DEFAULT 'LST_System' NOT NULL,
"add_Date" timestamp DEFAULT now(),
"upd_User" text DEFAULT 'LST_System' NOT NULL,
-- NOTE(review): audit-column casing is inconsistent within this table
-- ("add_User"/"add_Date"/"upd_User" vs "upd_date") and against the
-- snake_case "add_user"/"add_date"/"upd_user"/"upd_date" columns added to
-- "serverData" below — confirm this is intentional before building queries
-- against these names (Postgres quoted identifiers are case-sensitive).
"upd_date" timestamp DEFAULT now()
);
--> statement-breakpoint
-- Audit columns for "serverData"; note the different default actor
-- ('lst_user') versus 'LST_System' above — TODO confirm intentional.
ALTER TABLE "serverData" ADD COLUMN "add_user" text DEFAULT 'lst_user';--> statement-breakpoint
ALTER TABLE "serverData" ADD COLUMN "add_date" timestamp DEFAULT now();--> statement-breakpoint
ALTER TABLE "serverData" ADD COLUMN "upd_user" text DEFAULT 'lst_user';--> statement-breakpoint
ALTER TABLE "serverData" ADD COLUMN "upd_date" timestamp DEFAULT now();--> statement-breakpoint
-- Enforce one permission row per name.
CREATE UNIQUE INDEX "prodPermName" ON "prodPermissions" USING btree ("name");

File diff suppressed because it is too large Load Diff

View File

@@ -134,6 +134,13 @@
"when": 1760480733009,
"tag": "0018_aspiring_silver_samurai",
"breakpoints": true
},
{
"idx": 19,
"version": "7",
"when": 1760623729227,
"tag": "0019_bizarre_tarot",
"breakpoints": true
}
]
}

13
package-lock.json generated
View File

@@ -42,6 +42,7 @@
"@types/node": "^24.7.1",
"@types/nodemailer": "^7.0.2",
"@types/nodemailer-express-handlebars": "^4.0.5",
"@types/pg": "^8.15.5",
"concurrently": "^9.2.1",
"cz-conventional-changelog": "^3.3.0",
"standard-version": "^9.5.0",
@@ -3384,6 +3385,18 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/pg": {
"version": "8.15.5",
"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.15.5.tgz",
"integrity": "sha512-LF7lF6zWEKxuT3/OR8wAZGzkg4ENGXFNyiV/JeOt9z5B+0ZVwbql9McqX5c/WStFq1GaGso7H1AzP/qSzmlCKQ==",
"devOptional": true,
"license": "MIT",
"dependencies": {
"@types/node": "*",
"pg-protocol": "*",
"pg-types": "^2.2.0"
}
},
"node_modules/@types/qs": {
"version": "6.14.0",
"dev": true,

View File

@@ -1,90 +1,91 @@
{
"name": "lst",
"version": "1.5.0",
"description": "Logistics support tool - the place where the support happens.",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"dev:app": "dotenvx run -f .env -- tsx watch app/main.ts",
"dev:docs": "npm run translateDocs && cd lstDocs && npm start",
"dev:front": "cd frontend && npm run dev",
"dev:db:migrate": "npx drizzle-kit push",
"dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts",
"dev": "concurrently -n \"server,frontend,docs\" -c \"#007755,#2f6da3,#DB4FE0\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\"",
"copy:docs": "node scripts/lstDocCopy.mjs",
"build:app": "rimraf dist && npx tsc",
"build:front": "cd frontend && rimraf dist && npm run build",
"build:docs": "cd lstDocs && rimraf build && npm run build",
"build:wrapper": "cd lstWrapper && rimraf publish && dotnet publish -c Release -o ./publish",
"build:ctl": " ",
"build": "npm run translateDocs && npm run build:docs && npm run build:front && npm run build:app",
"install:front": "cd frontend && npm i",
"install:docs": "cd lstDocs && npm i",
"install:app": "npm i",
"start:app": "node dist/main.js",
"start": "dotenvx run -f .env -- npm run start:app",
"start:win": "set NODE_ENV=production && node dist/main.js",
"docker": "docker compose up --build --force-recreate -d",
"commit": "cz",
"deploy": "standard-version --conventional-commits && npm run translateDocs && npm run build && cd lstV2 && npm run build",
"db:migrate": "npx drizzle-kit push",
"db:generate": "npx drizzle-kit generate",
"translateDocs": "cd scripts && node translateScript.js",
"auth:generate": "npx @better-auth/cli generate --config ./app/src/pkg/auth/auth.ts",
"updates": "ncu -g"
},
"repository": {
"type": "git",
"url": "https://git.tuffraid.net/cowch/lst.git"
},
"keywords": [],
"author": "",
"license": "ISC",
"type": "module",
"dependencies": {
"@dotenvx/dotenvx": "^1.51.0",
"@tanstack/react-table": "^8.21.3",
"@types/cors": "^2.8.19",
"axios": "^1.12.2",
"better-auth": "^1.3.27",
"cors": "^2.8.5",
"date-fns": "^4.1.0",
"date-fns-tz": "^3.2.0",
"drizzle-kit": "^0.31.5",
"drizzle-orm": "^0.44.6",
"drizzle-zod": "^0.8.3",
"express": "^5.1.0",
"handlebars": "^4.7.8",
"morgan": "^1.10.1",
"mssql": "^12.0.0",
"nodemailer": "^7.0.9",
"nodemailer-express-handlebars": "^7.0.0",
"npm-check-updates": "^19.0.0",
"pg": "^8.16.3",
"pino": "^10.0.0",
"pino-pretty": "^13.1.2",
"postgres": "^3.4.7",
"socket.io": "^4.8.1",
"zod": "^4.1.12"
},
"devDependencies": {
"@biomejs/biome": "2.2.6",
"@types/express": "^5.0.3",
"@types/morgan": "^1.9.10",
"@types/mssql": "^9.1.8",
"@types/node": "^24.7.1",
"@types/nodemailer": "^7.0.2",
"@types/nodemailer-express-handlebars": "^4.0.5",
"concurrently": "^9.2.1",
"cz-conventional-changelog": "^3.3.0",
"standard-version": "^9.5.0",
"ts-node-dev": "^2.0.0",
"tsx": "^4.20.6",
"typescript": "^5.9.3"
},
"config": {
"commitizen": {
"path": "./node_modules/cz-conventional-changelog"
}
}
"name": "lst",
"version": "1.5.0",
"description": "Logistics support tool - the place where the support happens.",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"dev:app": "dotenvx run -f .env -- tsx watch app/main.ts",
"dev:docs": "npm run translateDocs && cd lstDocs && npm start",
"dev:front": "cd frontend && npm run dev",
"dev:db:migrate": "npx drizzle-kit push",
"dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts",
"dev": "concurrently -n \"server,frontend,docs\" -c \"#007755,#2f6da3,#DB4FE0\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\"",
"copy:docs": "node scripts/lstDocCopy.mjs",
"build:app": "rimraf dist && npx tsc && xcopy app\\src\\internal\\system\\controller\\settings\\settings.json dist\\src\\internal\\system\\controller\\settings /E /I /Y",
"build:front": "cd frontend && rimraf dist && npm run build",
"build:docs": "cd lstDocs && rimraf build && npm run build",
"build:wrapper": "cd lstWrapper && rimraf publish && dotnet publish -c Release -o ./publish",
"build:ctl": " ",
"build": "npm run translateDocs && npm run build:docs && npm run build:front && npm run build:app",
"install:front": "cd frontend && npm i",
"install:docs": "cd lstDocs && npm i",
"install:app": "npm i",
"start:app": "node dist/main.js",
"start": "dotenvx run -f .env -- npm run start:app",
"start:win": "set NODE_ENV=production && node dist/main.js",
"docker": "docker compose up --build --force-recreate -d",
"commit": "cz",
"deploy": "standard-version --conventional-commits && npm run translateDocs && npm run build && cd lstV2 && npm run build",
"db:migrate": "npx drizzle-kit push",
"db:generate": "npx drizzle-kit generate",
"translateDocs": "cd scripts && node translateScript.js",
"auth:generate": "npx @better-auth/cli generate --config ./app/src/pkg/auth/auth.ts",
"updates": "ncu -g"
},
"repository": {
"type": "git",
"url": "https://git.tuffraid.net/cowch/lst.git"
},
"keywords": [],
"author": "",
"license": "ISC",
"type": "module",
"dependencies": {
"@dotenvx/dotenvx": "^1.51.0",
"@tanstack/react-table": "^8.21.3",
"@types/cors": "^2.8.19",
"axios": "^1.12.2",
"better-auth": "^1.3.27",
"cors": "^2.8.5",
"date-fns": "^4.1.0",
"date-fns-tz": "^3.2.0",
"drizzle-kit": "^0.31.5",
"drizzle-orm": "^0.44.6",
"drizzle-zod": "^0.8.3",
"express": "^5.1.0",
"handlebars": "^4.7.8",
"morgan": "^1.10.1",
"mssql": "^12.0.0",
"nodemailer": "^7.0.9",
"nodemailer-express-handlebars": "^7.0.0",
"npm-check-updates": "^19.0.0",
"pg": "^8.16.3",
"pino": "^10.0.0",
"pino-pretty": "^13.1.2",
"postgres": "^3.4.7",
"socket.io": "^4.8.1",
"zod": "^4.1.12"
},
"devDependencies": {
"@biomejs/biome": "2.2.6",
"@types/express": "^5.0.3",
"@types/morgan": "^1.9.10",
"@types/mssql": "^9.1.8",
"@types/node": "^24.7.1",
"@types/nodemailer": "^7.0.2",
"@types/nodemailer-express-handlebars": "^4.0.5",
"@types/pg": "^8.15.5",
"concurrently": "^9.2.1",
"cz-conventional-changelog": "^3.3.0",
"standard-version": "^9.5.0",
"ts-node-dev": "^2.0.0",
"tsx": "^4.20.6",
"typescript": "^5.9.3"
},
"config": {
"commitizen": {
"path": "./node_modules/cz-conventional-changelog"
}
}
}