feat(admin): moved server build/update to full app
All checks were successful
Build and Push LST Docker Image / docker (push) Successful in 2m27s

This commit is contained in:
2026-04-21 07:36:04 -05:00
parent b832d7aa1e
commit cb00addee9
49 changed files with 15551 additions and 36 deletions

View File

@@ -0,0 +1,38 @@
/**
* To be able to run this we need to set our dev pc in the .env.
* if its empty just ignore it. this will just be the double catch
*/
import { Router } from "express";
import { build, building } from "../utils/build.utils.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
const router = Router();

/**
 * POST /release — trigger a new application build.
 *
 * Fire-and-forget: build() is intentionally not awaited so the request
 * returns immediately; progress is streamed to the "admin:build" socket
 * room and the logs by the build utilities.
 */
router.post("/release", (_, res) => {
  if (building) {
    // NOTE(review): a rejected trigger still returns HTTP 200 — consider
    // 409 Conflict, but confirm the frontend does not depend on 200 first.
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "admin",
      subModule: "build",
      message: `There is already a build in progress; please check the logs for its ongoing progress.`,
      data: [],
      status: 200,
    });
  }
  // No build running — kick one off in the background.
  build();
  return apiReturn(res, {
    success: true,
    level: "info",
    module: "admin",
    subModule: "build",
    message: `The build has been triggered; see the logs for the progress of the current build.`,
    data: [],
    status: 200,
  });
});

export default router;

View File

@@ -0,0 +1,12 @@
import type { Express } from "express";
import { requireAuth } from "../middleware/auth.middleware.js";
import build from "./admin.build.js";
import update from "./admin.updateServer.js";
// Mount the admin-only routers (build + deploy) behind the auth middleware.
export const setupAdminRoutes = (baseUrl: string, app: Express) => {
  //stats will be like this as we dont need to change this
  app.use(`${baseUrl}/api/admin/build`, requireAuth, build);
  // NOTE(review): the update router is mounted on the same "/api/admin/build"
  // prefix as the build router (its own route is "/updateServer", so requests
  // land on /api/admin/build/updateServer). This looks like a copy-paste of
  // the line above — confirm whether "/api/admin/update" was intended.
  app.use(`${baseUrl}/api/admin/build`, requireAuth, update);
  // all other system should be under /api/system/*
};

View File

@@ -0,0 +1,86 @@
/**
* To be able to run this we need to set our dev pc in the .env.
* if its empty just ignore it. this will just be the double catch
*/
import { Router } from "express";
import z from "zod";
import { building } from "../utils/build.utils.js";
import { runUpdate, updating } from "../utils/deployApp.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
// Request body for POST /updateServer.
const updateServer = z.object({
  server: z.string(),
  destination: z.string(),
  token: z.string().min(5, "Plant tokens should be at least 5 characters long"),
});

const router = Router();

// Shape resolved by runUpdate() (it is not strongly typed upstream).
type Update = {
  success: boolean;
  message: string;
};

/**
 * POST /updateServer — deploy the latest build to a plant server.
 *
 * Refused while another update or a build is already running so two
 * deployments cannot race each other.
 */
router.post("/updateServer", async (req, res) => {
  try {
    const validated = updateServer.parse(req.body);
    if (updating || building) {
      // NOTE(review): this echoes the plant token back in the response
      // message — confirm tokens are not sensitive before keeping this.
      return apiReturn(res, {
        success: false,
        level: "error",
        module: "admin",
        subModule: "update",
        message: `${validated.server}: ${validated.token} is already being updated, or is currently building the app.`,
        data: [],
        status: 200,
      });
    }
    const update = (await runUpdate({
      server: validated.server,
      destination: validated.destination,
      token: validated.token,
    })) as Update;
    return apiReturn(res, {
      success: update.success,
      level: update.success ? "info" : "error",
      module: "admin",
      subModule: "update",
      message: update.message,
      data: [],
      status: 200,
    });
  } catch (err) {
    if (err instanceof z.ZodError) {
      return apiReturn(res, {
        success: false,
        level: "error",
        // was "routes"/"auth" — copy-paste from the auth routes
        module: "admin",
        subModule: "update",
        message: "Validation failed",
        data: [z.flattenError(err).fieldErrors],
        status: 400,
      });
    }
    // Anything else (including runUpdate rejections) is a server-side
    // failure, not bad input.
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "admin",
      subModule: "update",
      message: "Internal Server Error updating the server",
      data: [err],
      status: 500, // was 400 with a "creating user" message copy-pasted from auth
    });
  }
});

export default router;

View File

@@ -179,6 +179,12 @@ export const runDatamartQuery = async (data: Data) => {
data.options.articles data.options.articles
? `and r.ArticleHumanReadableId in (${data.options.articles})` ? `and r.ArticleHumanReadableId in (${data.options.articles})`
: "--and r.ArticleHumanReadableId in ([articles]) ", : "--and r.ArticleHumanReadableId in ([articles]) ",
)
.replace(
"and DeliveredQuantity > 0",
data.options.all
? "--and DeliveredQuantity > 0"
: "and DeliveredQuantity > 0",
); );
break; break;

View File

@@ -0,0 +1,10 @@
import { integer, pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";
// Audit trail of deployments: one row per deployment attempt per server.
export const deploymentHistory = pgTable("deployment_history", {
  id: uuid("id").defaultRandom().primaryKey(),
  // NOTE(review): presumably points at server_data's primary key, but no
  // foreign-key constraint is declared — confirm whether .references() is wanted.
  serverId: uuid("server_id"),
  buildNumber: integer("build_number").notNull(),
  status: text("status").notNull(), // started, success, failed
  message: text("message"),
  createdAt: timestamp("created_at").defaultNow(),
});

View File

@@ -0,0 +1,40 @@
import {
boolean,
integer,
pgTable,
text,
timestamp,
uuid,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import type z from "zod";
// One row per plant server that can receive app deployments.
export const serverData = pgTable(
  "server_data",
  {
    server_id: uuid("id").defaultRandom().primaryKey(),
    name: text("name").notNull(),
    server: text("server"), // Windows hostname, e.g. "USLIM1VMS006"
    // Unique token identifying the plant in update/deploy requests; used as
    // the conflict target for the startup upsert.
    plantToken: text("plant_token").notNull().unique(),
    // NOTE(review): values stored here are IP addresses — "idAddress" /
    // "id_address" looks like a typo for "ipAddress" / "ip_address".
    // Renaming touches the DB column and all callers, so flagging only.
    idAddress: text("id_address"),
    greatPlainsPlantCode: text("great_plains_plant_code"),
    contactEmail: text("contact_email"),
    contactPhone: text("contact_phone"),
    active: boolean("active").default(true),
    serverLoc: text("server_loc"), // admin-share path on the server, e.g. "D$\\LST_V3"
    lastUpdated: timestamp("last_updated").defaultNow(),
    buildNumber: integer("build_number"), // last build deployed to this server
    isUpgrading: boolean("is_upgrading").default(false),
  },
  // (table) => [
  // // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
  // uniqueIndex("plant_token").on(table.plantToken),
  // ],
);

// Zod schemas derived from the table for validation at the API boundary.
export const serverDataSchema = createSelectSchema(serverData);
export const newServerDataSchema = createInsertSchema(serverData);
export type ServerDataSchema = z.infer<typeof serverDataSchema>;
export type NewServerData = z.infer<typeof newServerDataSchema>;

View File

@@ -1,10 +1,27 @@
import type { InferSelectModel } from "drizzle-orm"; import {
import { integer, pgTable, text, timestamp } from "drizzle-orm/pg-core"; boolean,
integer,
jsonb,
pgTable,
text,
timestamp,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import type z from "zod";
export const serverStats = pgTable("stats", { export const appStats = pgTable("app_stats", {
id: text("id").primaryKey().default("serverStats"), id: text("id").primaryKey().default("primary"),
build: integer("build").notNull().default(1), currentBuild: integer("current_build").notNull().default(1),
lastUpdate: timestamp("last_update").defaultNow(), lastBuildAt: timestamp("last_build_at"),
lastDeployAt: timestamp("last_deploy_at"),
building: boolean("building").notNull().default(false),
updating: boolean("updating").notNull().default(false),
lastUpdated: timestamp("last_updated").defaultNow(),
meta: jsonb("meta").$type<Record<string, unknown>>().default({}),
}); });
export type ServerStats = InferSelectModel<typeof serverStats>; export const appStatsSchema = createSelectSchema(appStats);
export const newAppStatsSchema = createInsertSchema(appStats, {});
export type AppStats = z.infer<typeof appStatsSchema>;
export type NewAppStats = z.infer<typeof newAppStatsSchema>;

View File

@@ -1,5 +1,5 @@
import type { Express } from "express"; import type { Express } from "express";
import { setupAdminRoutes } from "./admin/admin.routes.js";
import { setupAuthRoutes } from "./auth/auth.routes.js"; import { setupAuthRoutes } from "./auth/auth.routes.js";
// import the routes and route setups // import the routes and route setups
import { setupApiDocsRoutes } from "./configs/scaler.config.js"; import { setupApiDocsRoutes } from "./configs/scaler.config.js";
@@ -16,6 +16,7 @@ import { setupUtilsRoutes } from "./utils/utils.routes.js";
export const setupRoutes = (baseUrl: string, app: Express) => { export const setupRoutes = (baseUrl: string, app: Express) => {
//routes that are on by default //routes that are on by default
setupSystemRoutes(baseUrl, app); setupSystemRoutes(baseUrl, app);
setupAdminRoutes(baseUrl, app);
setupApiDocsRoutes(baseUrl, app); setupApiDocsRoutes(baseUrl, app);
setupProdSqlRoutes(baseUrl, app); setupProdSqlRoutes(baseUrl, app);
setupGPSqlRoutes(baseUrl, app); setupGPSqlRoutes(baseUrl, app);

View File

@@ -15,6 +15,7 @@ import { opendockSocketMonitor } from "./opendock/opendockSocketMonitor.utils.js
import { connectProdSql } from "./prodSql/prodSqlConnection.controller.js"; import { connectProdSql } from "./prodSql/prodSqlConnection.controller.js";
import { monitorAlplaPurchase } from "./purchase/purchase.controller.js"; import { monitorAlplaPurchase } from "./purchase/purchase.controller.js";
import { setupSocketIORoutes } from "./socket.io/serverSetup.js"; import { setupSocketIORoutes } from "./socket.io/serverSetup.js";
import { serversChecks } from "./system/serverData.controller.js";
import { baseSettingValidationCheck } from "./system/settingsBase.controller.js"; import { baseSettingValidationCheck } from "./system/settingsBase.controller.js";
import { startTCPServer } from "./tcpServer/tcp.server.js"; import { startTCPServer } from "./tcpServer/tcp.server.js";
import { createCronJob } from "./utils/croner.utils.js"; import { createCronJob } from "./utils/croner.utils.js";
@@ -70,6 +71,7 @@ const start = async () => {
// one shots only needed to run on server startups // one shots only needed to run on server startups
createNotifications(); createNotifications();
startNotifications(); startNotifications();
serversChecks();
}, 5 * 1000); }, 5 * 1000);
process.on("uncaughtException", async (err) => { process.on("uncaughtException", async (err) => {

View File

@@ -9,7 +9,7 @@ type RoomDefinition<T = unknown> = {
export const protectedRooms: any = { export const protectedRooms: any = {
logs: { requiresAuth: true, role: ["admin", "systemAdmin"] }, logs: { requiresAuth: true, role: ["admin", "systemAdmin"] },
admin: { requiresAuth: true, role: ["admin", "systemAdmin"] }, //admin: { requiresAuth: false, role: ["admin", "systemAdmin"] },
}; };
export const roomDefinition: Record<RoomId, RoomDefinition> = { export const roomDefinition: Record<RoomId, RoomDefinition> = {
@@ -36,4 +36,16 @@ export const roomDefinition: Record<RoomId, RoomDefinition> = {
return []; return [];
}, },
}, },
admin: {
seed: async (limit) => {
console.info(limit);
return [];
},
},
"admin:build": {
seed: async (limit) => {
console.info(limit);
return [];
},
},
}; };

View File

@@ -88,14 +88,12 @@ export const setupSocketIORoutes = (baseUrl: string, server: HttpServer) => {
}); });
} }
const roles = Array.isArray(config.role) ? config.role : [config.role]; const roles = Array.isArray(config?.role) ? config?.role : [config?.role];
console.log(roles, s.user.role);
//if (config?.role && s.user?.role !== config.role) { //if (config?.role && s.user?.role !== config.role) {
if (config?.role && !roles.includes(s.user?.role)) { if (config?.role && !roles.includes(s.user?.role)) {
return s.emit("room-error", { return s.emit("room-error", {
room: rn, roomId: rn,
message: `Not authorized to be in room: ${rn}`, message: `Not authorized to be in room: ${rn}`,
}); });
} }

View File

@@ -1 +1 @@
export type RoomId = "logs" | "labels"; //| "alerts" | "metrics"; export type RoomId = "logs" | "labels" | "admin" | "admin:build"; //| "alerts" | "metrics";

View File

@@ -0,0 +1,132 @@
import { sql } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import {
type NewServerData,
serverData,
} from "../db/schema/serverData.schema.js";
import { createLogger } from "../logger/logger.controller.js";
import { tryCatch } from "../utils/trycatch.utils.js";
/**
 * Seed list of plant servers, upserted into server_data on startup by
 * serversChecks(). plantToken is the conflict key — edit an entry here and
 * restart to update that plant's row.
 *
 * NOTE(review): idAddress holds IP addresses; the field name looks like a
 * typo for "ipAddress" (see serverData.schema).
 */
const servers: NewServerData[] = [
  {
    name: "Test server 1",
    server: "USMCD1VMS036",
    plantToken: "test3",
    idAddress: "10.193.0.56",
    greatPlainsPlantCode: "00",
    contactEmail: "",
    contactPhone: "",
    serverLoc: "D$\\LST_V3",
    buildNumber: 1,
  },
  {
    name: "Test server 2",
    server: "USIOW1VMS036",
    plantToken: "test2",
    idAddress: "10.75.0.56",
    greatPlainsPlantCode: "00",
    contactEmail: "",
    contactPhone: "",
    serverLoc: "D$\\LST_V3",
    buildNumber: 1,
  },
  {
    name: "Lima",
    server: "USLIM1VMS006",
    plantToken: "uslim1",
    idAddress: "10.53.0.26",
    greatPlainsPlantCode: "50",
    contactEmail: "",
    contactPhone: "",
    serverLoc: "D$\\LST_V3",
    buildNumber: 1,
  },
  {
    name: "Houston",
    server: "ushou1VMS006",
    plantToken: "ushou1",
    idAddress: "10.195.0.26",
    greatPlainsPlantCode: "20",
    contactEmail: "",
    contactPhone: "",
    serverLoc: "D$\\LST_V3",
    buildNumber: 1,
  },
  {
    name: "Dayton",
    server: "usday1VMS006",
    plantToken: "usday1",
    idAddress: "10.44.0.56",
    greatPlainsPlantCode: "80",
    contactEmail: "",
    contactPhone: "",
    serverLoc: "D$\\LST_V3",
    buildNumber: 1,
  },
  {
    name: "West Bend",
    server: "usweb1VMS006",
    plantToken: "usweb1",
    idAddress: "10.80.0.26",
    greatPlainsPlantCode: "65",
    contactEmail: "",
    contactPhone: "",
    serverLoc: "D$\\LST_V3",
    buildNumber: 1,
  },
  {
    // NOTE(review): this token breaks the "<site>1" pattern of the others
    // ("usjci" vs "usjci1") — confirm it is intentional.
    name: "Jeff City",
    server: "usjci1VMS006",
    plantToken: "usjci",
    idAddress: "10.167.0.26",
    greatPlainsPlantCode: "40",
    contactEmail: "",
    contactPhone: "",
    serverLoc: "D$\\LST_V3",
    buildNumber: 1,
  },
  {
    name: "Sherman",
    server: "usshe1vms006",
    plantToken: "usshe1",
    idAddress: "10.205.0.26",
    greatPlainsPlantCode: "21",
    contactEmail: "",
    contactPhone: "",
    serverLoc: "D$\\LST_V3",
    buildNumber: 1,
  },
];
/**
 * Upsert the seed list of plant servers into server_data at startup.
 * Rows conflicting on plant_token have their mutable columns refreshed
 * from the seed data (build number and flags are left untouched).
 */
export const serversChecks = async () => {
  const logger = createLogger({ module: "system", subModule: "serverData" });

  const upsert = db
    .insert(serverData)
    .values(servers)
    .onConflictDoUpdate({
      target: serverData.plantToken,
      set: {
        server: sql`excluded.server`,
        name: sql`excluded.name`,
        idAddress: sql`excluded."id_address"`,
        greatPlainsPlantCode: sql`excluded.great_plains_plant_code`,
        contactEmail: sql`excluded."contact_email"`,
        contactPhone: sql`excluded.contact_phone`,
        serverLoc: sql`excluded.server_loc`,
      },
    })
    .returning();

  const { data: rows, error: upsertError } = await tryCatch(upsert);

  if (upsertError) {
    logger.error(
      { error: upsertError },
      "There was an error when adding or updating the servers.",
    );
  }
  if (rows) {
    logger.info({}, "All Servers were added/updated");
  }
};

View File

@@ -0,0 +1,43 @@
import { type Response, Router } from "express";
import { db } from "../db/db.controller.js";
import { serverData } from "../db/schema/serverData.schema.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
// export const updateSetting = async (setting: Setting) => {
// // TODO: when the setting is a feature setting we will need to have it run each kill switch on the crons well just stop them and during a reset it just wont start them
// // TODO: when the setting is a system we will need to force an app restart
// // TODO: when the setting is standard we don't do anything.
// };
const r = Router();

/**
 * GET / — return every plant server, ordered by name.
 */
r.get("/", async (_, res: Response) => {
  const query = db.select().from(serverData).orderBy(serverData.name);
  const { data: rows, error: queryError } = await tryCatch(query);

  if (queryError) {
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "system",
      subModule: "serverData",
      message: `There was an error getting the servers `,
      data: [queryError],
      status: 400,
    });
  }

  return apiReturn(res, {
    success: true,
    level: "info",
    module: "system",
    subModule: "serverData",
    message: `All current servers`,
    data: rows ?? [],
    status: 200,
  });
});

export default r;

View File

@@ -1,5 +1,6 @@
import type { Express } from "express"; import type { Express } from "express";
import { requireAuth } from "../middleware/auth.middleware.js"; import { requireAuth } from "../middleware/auth.middleware.js";
import getServers from "./serverData.route.js";
import getSettings from "./settings.route.js"; import getSettings from "./settings.route.js";
import updSetting from "./settingsUpdate.route.js"; import updSetting from "./settingsUpdate.route.js";
import stats from "./stats.route.js"; import stats from "./stats.route.js";
@@ -10,6 +11,7 @@ export const setupSystemRoutes = (baseUrl: string, app: Express) => {
app.use(`${baseUrl}/api/stats`, stats); app.use(`${baseUrl}/api/stats`, stats);
app.use(`${baseUrl}/api/mobile`, mobile); app.use(`${baseUrl}/api/mobile`, mobile);
app.use(`${baseUrl}/api/settings`, getSettings); app.use(`${baseUrl}/api/settings`, getSettings);
app.use(`${baseUrl}/api/servers`, getServers);
app.use(`${baseUrl}/api/settings`, requireAuth, updSetting); app.use(`${baseUrl}/api/settings`, requireAuth, updSetting);
// all other system should be under /api/system/* // all other system should be under /api/system/*

View File

@@ -0,0 +1,91 @@
import { spawn } from "node:child_process";
import { createLogger } from "../logger/logger.controller.js";
import { emitToRoom } from "../socket.io/roomEmitter.socket.js";
import { updateAppStats } from "./updateAppStats.utils.js";
import { zipBuild } from "./zipper.utils.js";
/**
 * Broadcast one build-output line to the "admin:build" socket room and
 * mirror it into the persistent app log.
 *
 * @param message a single line of build/deploy output
 * @param level   "info" | "error" (free-form string; defaults to "info")
 */
export const emitBuildLog = (message: string, level = "info") => {
  const payload = {
    type: "build",
    level,
    message,
    timestamp: new Date().toISOString(),
  };
  emitToRoom("admin:build", payload as any);
  // Fixed: previously only "info" lines were persisted, so build errors
  // never reached the logger — route by level instead.
  if (payload.level === "error") {
    log.error({ stack: payload }, payload.message);
  } else {
    log.info({ stack: payload }, payload.message);
  }
};
// True while an `npm run build` child process is running. Module-level flag
// read by the admin routes to reject concurrent build/deploy requests.
export let building = false;
const log = createLogger({ module: "utils", subModule: "builds" });
/**
 * Run `npm run build` in the directory configured by DEV_DIR, streaming
 * stdout/stderr line-by-line to the "admin:build" socket room.
 *
 * Resolves true on success (after kicking off zipBuild, which is not
 * awaited), false on a non-zero exit code or a spawn error. Never rejects,
 * so fire-and-forget callers cannot leak an unhandled rejection.
 */
export const build = async () => {
  const appDir = process.env.DEV_DIR ?? "";
  return new Promise((resolve) => {
    building = true;
    updateAppStats({
      lastUpdated: new Date(),
      building: true,
    });
    emitBuildLog(`Starting build in: ${appDir}`);

    const child = spawn("npm", ["run", "build"], {
      cwd: appDir,
      shell: true,
    });

    // Forward each non-empty output line to the build log at the given level.
    const forward = (chunk: unknown, level: string) => {
      for (const line of String(chunk).split(/\r?\n/)) {
        if (line.trim() !== "") {
          emitBuildLog(line, level);
        }
      }
    };
    child.stdout.on("data", (data) => forward(data, "info"));
    child.stderr.on("data", (data) => forward(data, "error"));

    child.on("close", (code) => {
      building = false;
      if (code === 0) {
        emitBuildLog("Build completed successfully.", "info");
        // Fire-and-forget; zipBuild() also persists building:false in app_stats.
        zipBuild();
        resolve(true);
      } else {
        updateAppStats({
          lastUpdated: new Date(),
          building: false,
        });
        emitBuildLog(`Build failed with code ${code}`, "error");
        // Fixed: previously neither resolve nor reject ran on failure, so
        // the returned promise hung forever.
        resolve(false);
      }
    });

    child.on("error", (err) => {
      building = false;
      updateAppStats({
        lastUpdated: new Date(),
        building: false,
      });
      emitBuildLog(`Process error: ${err.message}`, "error");
      // Fixed: settle instead of hanging (reject was commented out).
      resolve(false);
    });
  });
};

123
backend/utils/deployApp.ts Normal file
View File

@@ -0,0 +1,123 @@
import { spawn } from "node:child_process";
import { eq, sql } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { serverData } from "../db/schema/serverData.schema.js";
import { appStats } from "../db/schema/stats.schema.js";
//import { createLogger } from "../logger/logger.controller.js";
import { emitBuildLog } from "./build.utils.js";
import { returnFunc } from "./returnHelper.utils.js";
// const log = createLogger({ module: "utils", subModule: "deploy" });
// True while a deployment script is running; checked together with
// `building` so deployments cannot overlap.
export let updating = false;

// Stamp the server row (matched by plant token) with the app's current
// build number after a successful deployment.
const updateServerBuildNumber = async (token: string) => {
  // get the current build
  const buildNum = await db.select().from(appStats);
  // update the build now
  // NOTE(review): if app_stats is empty, currentBuild is undefined here —
  // confirm drizzle skips/handles an undefined value in .set().
  await db
    .update(serverData)
    .set({ buildNumber: buildNum[0]?.currentBuild, lastUpdated: sql`NOW()` })
    .where(eq(serverData.plantToken, token));
};
/**
 * Deploy the latest build to a plant server by invoking the PowerShell
 * update script configured in UPDATE_SCRIPT_PATH.
 *
 * Script output is streamed line-by-line to the build log. Resolves with
 * { success, message, data } when the script exits, rejects with the same
 * shape on a non-zero exit code or a spawn error (callers catch and report).
 */
export const runUpdate = ({
  server,
  destination,
  token,
}: {
  server: string;
  destination: string;
  token: string;
}) => {
  return new Promise((resolve, reject) => {
    const scriptPath = process.env.UPDATE_SCRIPT_PATH;
    if (!scriptPath) {
      // Fixed: this path used to report success: true, leave `updating`
      // stuck at true forever, and never settle the promise.
      returnFunc({
        success: false,
        level: "error",
        module: "utils",
        subModule: "deploy",
        message: "UPDATE_SCRIPT_PATH please make sure you have this set.",
        data: [],
        notify: true,
        room: "admin",
      });
      return resolve({
        success: false,
        message: "UPDATE_SCRIPT_PATH please make sure you have this set.",
        data: [],
      });
    }
    updating = true;

    // NOTE(review): DEV_PASSWORD is passed as a command-line argument and is
    // visible in the process list — consider an env var or stdin instead.
    const args = [
      "-ExecutionPolicy",
      "Bypass",
      "-File",
      scriptPath,
      "-Server",
      server,
      "-Destination",
      destination,
      "-Token",
      token,
      "-ADM_USER",
      process.env.DEV_USER ?? "",
      "-ADM_PASSWORD",
      process.env.DEV_PASSWORD ?? "",
      "-AppDir",
      process.env.DEV_DIR ?? "",
    ];

    emitBuildLog(`Starting update for ${server}`);
    const child = spawn("powershell.exe", args, {
      shell: false,
    });

    // Stream each non-empty script output line into the build log.
    const forward = (chunk: unknown, level?: string) => {
      for (const line of String(chunk).split(/\r?\n/)) {
        if (line.trim()) {
          if (level) {
            emitBuildLog(line, level);
          } else {
            emitBuildLog(line);
          }
        }
      }
    };
    child.stdout.on("data", (data) => forward(data));
    child.stderr.on("data", (data) => forward(data, "error"));

    child.on("close", (code) => {
      updating = false;
      if (code === 0) {
        emitBuildLog(`Update completed for ${server}`);
        // Fire-and-forget: stamp the server row with the new build number.
        updateServerBuildNumber(token);
        resolve({
          success: true,
          message: `Update completed for ${server}`,
          data: [],
        });
      } else {
        emitBuildLog(`Update failed for ${server} (code ${code})`, "error");
        reject({
          success: false,
          message: `Update failed for ${server} (code ${code})`,
          data: [],
        });
      }
    });

    child.on("error", (err) => {
      updating = false;
      emitBuildLog(`Process error: ${err.message}`, "error");
      reject({
        success: false,
        message: `${server}: Encountered an error while processing: ${err.message} `,
        data: err,
      });
    });
  });
};

View File

@@ -14,7 +14,8 @@ export interface ReturnHelper<T = unknown[]> {
| "email" | "email"
| "purchase" | "purchase"
| "tcp" | "tcp"
| "logistics"; | "logistics"
| "admin";
subModule: string; subModule: string;
level: "info" | "error" | "debug" | "fatal" | "warn"; level: "info" | "error" | "debug" | "fatal" | "warn";

View File

@@ -0,0 +1,17 @@
import { db } from "../db/db.controller.js";
import { appStats } from "../db/schema/stats.schema.js";
/**
 * Upsert the singleton app_stats row (id "primary"), applying only the
 * provided columns when the row already exists.
 */
export const updateAppStats = async (
  data: Partial<typeof appStats.$inferInsert>,
) => {
  const row = { id: "primary", ...data };
  await db
    .insert(appStats)
    .values(row)
    .onConflictDoUpdate({ target: appStats.id, set: data });
};

View File

@@ -0,0 +1,177 @@
import fs from "node:fs";
import fsp from "node:fs/promises";
import path from "node:path";
import archiver from "archiver";
import { createLogger } from "../logger/logger.controller.js";
import { emitBuildLog } from "./build.utils.js";
import { updateAppStats } from "./updateAppStats.utils.js";
const log = createLogger({ module: "utils", subModule: "zip" });
// Non-throwing check for whether a filesystem path is accessible.
const exists = (target: string): Promise<boolean> =>
  fsp.access(target).then(
    () => true,
    () => false,
  );
/**
 * Read, increment, and persist the build counter stored in buildNumberFile,
 * returning the new build number.
 *
 * A missing or corrupt counter file resets the sequence so the next build
 * is number 1. The result is pushed into app_stats in every case — fixed:
 * the original only updated app_stats on the happy path, so the very first
 * build (and any reset after corruption) never reached the DB.
 *
 * NOTE(review): read-modify-write is not atomic; acceptable while builds
 * are serialized by the `building` flag.
 */
const getNextBuildNumber = async (buildNumberFile: string) => {
  let next = 1;
  if (await exists(buildNumberFile)) {
    const raw = await fsp.readFile(buildNumberFile, "utf8");
    const current = Number.parseInt(raw.trim(), 10);
    if (!Number.isNaN(current) && current >= 1) {
      next = current + 1;
    }
  }
  await fsp.writeFile(buildNumberFile, String(next), "utf8");
  // Mirror the counter into the DB so the UI/deploys see the new build.
  await updateAppStats({
    currentBuild: next,
    lastBuildAt: new Date(),
    building: true,
  });
  return next;
};
// Retain only the newest `maxBuilds` LSTV3-*.zip archives in buildFolder
// (newest-first by mtime) and delete the rest, logging each removal.
const cleanupOldBuilds = async (buildFolder: string, maxBuilds: number) => {
  const zipPattern = /^LSTV3-\d+\.zip$/i;
  const entries = await fsp.readdir(buildFolder, { withFileTypes: true });

  const archives: { fullPath: string; name: string; mtimeMs: number }[] = [];
  for (const entry of entries) {
    if (!entry.isFile() || !zipPattern.test(entry.name)) continue;
    const fullPath = path.join(buildFolder, entry.name);
    const { mtimeMs } = await fsp.stat(fullPath);
    archives.push({ fullPath, name: entry.name, mtimeMs });
  }

  // Newest first; everything past the retention limit gets removed.
  archives.sort((a, b) => b.mtimeMs - a.mtimeMs);
  for (const stale of archives.slice(maxBuilds)) {
    await fsp.rm(stale.fullPath, { force: true });
    emitBuildLog(`Removed old build: ${stale.name}`);
  }
};
/**
 * Package the freshly-built app into builds/LSTV3-<n>.zip.
 *
 * Reads the .includes file (one path per line, relative to DEV_DIR), copies
 * each entry into a temp staging folder, zips the staging folder contents,
 * prunes old archives down to MAX_BUILDS (default 5), then marks the build
 * finished in app_stats. Returns { success, buildNumber, zipFile,
 * zipFileName }, or undefined when DEV_DIR or .includes is missing.
 *
 * NOTE(review): the build counter is incremented before zipping, so a
 * failed zip still consumes a build number — confirm that is acceptable.
 */
export const zipBuild = async () => {
  const appDir = process.env.DEV_DIR ?? "";
  const maxBuilds = Number(process.env.MAX_BUILDS ?? 5);
  if (!appDir) {
    log.error({ notify: true }, "Forgot to add in the dev dir into the env");
    return;
  }
  const includesFile = path.join(appDir, ".includes");
  const buildNumberFile = path.join(appDir, ".buildNumber");
  const buildFolder = path.join(appDir, "builds");
  const tempFolder = path.join(appDir, "temp", "zip-temp");
  if (!(await exists(includesFile))) {
    log.error({ notify: true }, "Missing .includes file common");
    return;
  }
  await fsp.mkdir(buildFolder, { recursive: true });
  const buildNumber = await getNextBuildNumber(buildNumberFile);
  const zipFileName = `LSTV3-${buildNumber}.zip`;
  const zipFile = path.join(buildFolder, zipFileName);
  // make the folders in case they are not created already
  emitBuildLog(`Using build number: ${buildNumber}`);
  // Start from a clean staging folder every run.
  if (await exists(tempFolder)) {
    await fsp.rm(tempFolder, { recursive: true, force: true });
  }
  await fsp.mkdir(tempFolder, { recursive: true });
  // One include entry per non-empty line, relative to appDir.
  const includes = (await fsp.readFile(includesFile, "utf8"))
    .split(/\r?\n/)
    .map((line) => line.trim())
    .filter(Boolean);
  emitBuildLog(`Preparing zip from ${includes.length} include entries`);
  for (const relPath of includes) {
    const source = path.join(appDir, relPath);
    const dest = path.join(tempFolder, relPath);
    // Missing entries are logged and skipped rather than failing the zip.
    if (!(await exists(source))) {
      emitBuildLog(`Skipping missing path: ${relPath}`, "error");
      continue;
    }
    const stat = await fsp.stat(source);
    await fsp.mkdir(path.dirname(dest), { recursive: true });
    if (stat.isDirectory()) {
      emitBuildLog(`Copying folder: ${relPath}`);
      await fsp.cp(source, dest, { recursive: true });
    } else {
      emitBuildLog(`Copying file: ${relPath}`);
      await fsp.copyFile(source, dest);
    }
  }
  // if something crazy happens and we get the same build lets just reuse it
  // if (await exists(zipFile)) {
  // await fsp.rm(zipFile, { force: true });
  // }
  emitBuildLog(`Creating zip: ${zipFile}`);
  // Wrap the archiver stream in a promise; "close" on the output stream
  // signals the archive has been fully flushed to disk.
  await new Promise<void>((resolve, reject) => {
    const output = fs.createWriteStream(zipFile);
    const archive = archiver("zip", { zlib: { level: 9 } });
    output.on("close", () => resolve());
    output.on("error", reject);
    archive.on("error", reject);
    archive.pipe(output);
    // zip contents of temp folder, not temp folder itself
    archive.directory(tempFolder, false);
    archive.finalize();
  });
  await fsp.rm(tempFolder, { recursive: true, force: true });
  emitBuildLog(`Zip completed successfully: ${zipFile}`);
  await cleanupOldBuilds(buildFolder, maxBuilds);
  // Build pipeline is done — clear the building flag in app_stats.
  await updateAppStats({
    lastUpdated: new Date(),
    building: false,
  });
  return {
    success: true,
    buildNumber,
    zipFile,
    zipFileName,
  };
};

View File

@@ -5,17 +5,20 @@ meta {
} }
get { get {
url: {{url}}/api/datamart/:name?historical=x url: {{url}}/api/datamart/:name?articles=118,120&startDate=2026-01-01&endDate=2026-12-31&all=x
body: none body: none
auth: inherit auth: inherit
} }
params:query { params:query {
historical: x articles: 118,120
startDate: 2026-01-01
endDate: 2026-12-31
all: x
} }
params:path { params:path {
name: inventory name: deliveryByDateRange
} }
settings { settings {

View File

@@ -19,6 +19,8 @@
"better-auth": "^1.5.5", "better-auth": "^1.5.5",
"class-variance-authority": "^0.7.1", "class-variance-authority": "^0.7.1",
"clsx": "^2.1.1", "clsx": "^2.1.1",
"date-fns": "^4.1.0",
"date-fns-tz": "^3.2.0",
"lucide-react": "^0.577.0", "lucide-react": "^0.577.0",
"next-themes": "^0.4.6", "next-themes": "^0.4.6",
"radix-ui": "^1.4.3", "radix-ui": "^1.4.3",
@@ -6016,6 +6018,25 @@
"node": ">= 12" "node": ">= 12"
} }
}, },
"node_modules/date-fns": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-4.1.0.tgz",
"integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==",
"license": "MIT",
"funding": {
"type": "github",
"url": "https://github.com/sponsors/kossnocorp"
}
},
"node_modules/date-fns-tz": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/date-fns-tz/-/date-fns-tz-3.2.0.tgz",
"integrity": "sha512-sg8HqoTEulcbbbVXeg84u5UnlsQa8GS5QXMqjjYIhS4abEVVKIUwe0/l/UhrZdKaL/W5eWZNlbTeEIiOXTcsBQ==",
"license": "MIT",
"peerDependencies": {
"date-fns": "^3.0.0 || ^4.0.0"
}
},
"node_modules/debug": { "node_modules/debug": {
"version": "4.4.3", "version": "4.4.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",

View File

@@ -34,7 +34,9 @@
"tailwind-merge": "^3.5.0", "tailwind-merge": "^3.5.0",
"tailwindcss": "^4.2.1", "tailwindcss": "^4.2.1",
"tw-animate-css": "^1.4.0", "tw-animate-css": "^1.4.0",
"zod": "^4.3.6" "zod": "^4.3.6",
"date-fns": "^4.1.0",
"date-fns-tz": "^3.2.0"
}, },
"devDependencies": { "devDependencies": {
"@eslint/js": "^9.36.0", "@eslint/js": "^9.36.0",

View File

@@ -1,5 +1,5 @@
import { Link } from "@tanstack/react-router"; import { Link } from "@tanstack/react-router";
import { Bell, Logs, Settings } from "lucide-react"; import { Bell, Logs, Server, Settings } from "lucide-react";
import { import {
SidebarGroup, SidebarGroup,
@@ -40,6 +40,14 @@ export default function AdminSidebar({ session }: any) {
module: "admin", module: "admin",
active: true, active: true,
}, },
{
title: "Servers",
url: "/admin/servers",
icon: Server,
role: ["systemAdmin", "admin"],
module: "admin",
active: true,
},
{ {
title: "Logs", title: "Logs",
url: "/admin/logs", url: "/admin/logs",

View File

@@ -1,22 +1,55 @@
import { useEffect, useState } from "react"; import { useCallback, useEffect, useState } from "react";
import socket from "@/lib/socket.io"; import socket from "@/lib/socket.io";
export function useSocketRoom<T>(roomId: string) { type RoomUpdatePayload<T> = {
roomId: string;
payloads: T[];
};
type RoomErrorPayload = {
roomId?: string;
message?: string;
};
export function useSocketRoom<T>(
roomId: string,
getKey?: (item: T) => string | number,
) {
const [data, setData] = useState<T[]>([]); const [data, setData] = useState<T[]>([]);
const [info, setInfo] = useState( const [info, setInfo] = useState(
"No data yet — join the room to start receiving", "No data yet — join the room to start receiving",
); );
const clearRoom = useCallback(
(id?: string | number) => {
if (id !== undefined && getKey) {
setData((prev) => prev.filter((item) => getKey(item) !== id));
setInfo(`Removed item ${id}`);
return;
}
setData([]);
setInfo("Room data cleared");
},
[getKey],
);
useEffect(() => { useEffect(() => {
function handleConnect() { function handleConnect() {
socket.emit("join-room", roomId); socket.emit("join-room", roomId);
setInfo(`Joined room: ${roomId}`);
} }
function handleUpdate(payload: any) { function handleUpdate(payload: RoomUpdatePayload<T>) {
// protects against other room updates hitting this hook
if (payload.roomId !== roomId) return;
setData((prev) => [...payload.payloads, ...prev]); setData((prev) => [...payload.payloads, ...prev]);
setInfo("");
} }
function handleError(err: any) { function handleError(err: RoomErrorPayload) {
if (err.roomId && err.roomId !== roomId) return;
setInfo(err.message ?? "Room error"); setInfo(err.message ?? "Room error");
} }
@@ -31,6 +64,7 @@ export function useSocketRoom<T>(roomId: string) {
// If already connected, join immediately // If already connected, join immediately
if (socket.connected) { if (socket.connected) {
socket.emit("join-room", roomId); socket.emit("join-room", roomId);
setInfo(`Joined room: ${roomId}`);
} }
return () => { return () => {
@@ -42,5 +76,5 @@ export function useSocketRoom<T>(roomId: string) {
}; };
}, [roomId]); }, [roomId]);
return { data, info }; return { data, info, clearRoom };
} }

View File

@@ -0,0 +1,22 @@
import { keepPreviousData, queryOptions } from "@tanstack/react-query";
import axios from "axios";
/**
 * TanStack Query options for the plant-server list.
 * Refetches on window focus; keeps the previous result visible while a
 * refetch is in flight.
 */
export function servers() {
  return queryOptions({
    queryKey: ["servers"],
    queryFn: () => fetchServers(),
    staleTime: 5000,
    refetchOnWindowFocus: true,
    placeholderData: keepPreviousData,
  });
}

// Renamed from `fetch`, which shadowed the global fetch() in this module.
const fetchServers = async () => {
  // Simulate latency during local development so loading states are visible.
  if (window.location.hostname === "localhost") {
    await new Promise((res) => setTimeout(res, 1500));
  }
  const { data } = await axios.get("/lst/api/servers");
  return data.data;
};

View File

@@ -105,6 +105,7 @@ export default function LstTable({
</TableBody> </TableBody>
</Table> </Table>
<ScrollBar orientation="horizontal" /> <ScrollBar orientation="horizontal" />
<ScrollBar orientation="vertical" />
</ScrollArea> </ScrollArea>
<div className="flex items-center justify-end space-x-2 py-4"> <div className="flex items-center justify-end space-x-2 py-4">
<Button <Button

View File

@@ -14,6 +14,7 @@ import { Route as IndexRouteImport } from './routes/index'
import { Route as DocsIndexRouteImport } from './routes/docs/index' import { Route as DocsIndexRouteImport } from './routes/docs/index'
import { Route as DocsSplatRouteImport } from './routes/docs/$' import { Route as DocsSplatRouteImport } from './routes/docs/$'
import { Route as AdminSettingsRouteImport } from './routes/admin/settings' import { Route as AdminSettingsRouteImport } from './routes/admin/settings'
import { Route as AdminServersRouteImport } from './routes/admin/servers'
import { Route as AdminNotificationsRouteImport } from './routes/admin/notifications' import { Route as AdminNotificationsRouteImport } from './routes/admin/notifications'
import { Route as AdminLogsRouteImport } from './routes/admin/logs' import { Route as AdminLogsRouteImport } from './routes/admin/logs'
import { Route as authLoginRouteImport } from './routes/(auth)/login' import { Route as authLoginRouteImport } from './routes/(auth)/login'
@@ -46,6 +47,11 @@ const AdminSettingsRoute = AdminSettingsRouteImport.update({
path: '/admin/settings', path: '/admin/settings',
getParentRoute: () => rootRouteImport, getParentRoute: () => rootRouteImport,
} as any) } as any)
const AdminServersRoute = AdminServersRouteImport.update({
id: '/admin/servers',
path: '/admin/servers',
getParentRoute: () => rootRouteImport,
} as any)
const AdminNotificationsRoute = AdminNotificationsRouteImport.update({ const AdminNotificationsRoute = AdminNotificationsRouteImport.update({
id: '/admin/notifications', id: '/admin/notifications',
path: '/admin/notifications', path: '/admin/notifications',
@@ -83,6 +89,7 @@ export interface FileRoutesByFullPath {
'/login': typeof authLoginRoute '/login': typeof authLoginRoute
'/admin/logs': typeof AdminLogsRoute '/admin/logs': typeof AdminLogsRoute
'/admin/notifications': typeof AdminNotificationsRoute '/admin/notifications': typeof AdminNotificationsRoute
'/admin/servers': typeof AdminServersRoute
'/admin/settings': typeof AdminSettingsRoute '/admin/settings': typeof AdminSettingsRoute
'/docs/$': typeof DocsSplatRoute '/docs/$': typeof DocsSplatRoute
'/docs/': typeof DocsIndexRoute '/docs/': typeof DocsIndexRoute
@@ -96,6 +103,7 @@ export interface FileRoutesByTo {
'/login': typeof authLoginRoute '/login': typeof authLoginRoute
'/admin/logs': typeof AdminLogsRoute '/admin/logs': typeof AdminLogsRoute
'/admin/notifications': typeof AdminNotificationsRoute '/admin/notifications': typeof AdminNotificationsRoute
'/admin/servers': typeof AdminServersRoute
'/admin/settings': typeof AdminSettingsRoute '/admin/settings': typeof AdminSettingsRoute
'/docs/$': typeof DocsSplatRoute '/docs/$': typeof DocsSplatRoute
'/docs': typeof DocsIndexRoute '/docs': typeof DocsIndexRoute
@@ -110,6 +118,7 @@ export interface FileRoutesById {
'/(auth)/login': typeof authLoginRoute '/(auth)/login': typeof authLoginRoute
'/admin/logs': typeof AdminLogsRoute '/admin/logs': typeof AdminLogsRoute
'/admin/notifications': typeof AdminNotificationsRoute '/admin/notifications': typeof AdminNotificationsRoute
'/admin/servers': typeof AdminServersRoute
'/admin/settings': typeof AdminSettingsRoute '/admin/settings': typeof AdminSettingsRoute
'/docs/$': typeof DocsSplatRoute '/docs/$': typeof DocsSplatRoute
'/docs/': typeof DocsIndexRoute '/docs/': typeof DocsIndexRoute
@@ -125,6 +134,7 @@ export interface FileRouteTypes {
| '/login' | '/login'
| '/admin/logs' | '/admin/logs'
| '/admin/notifications' | '/admin/notifications'
| '/admin/servers'
| '/admin/settings' | '/admin/settings'
| '/docs/$' | '/docs/$'
| '/docs/' | '/docs/'
@@ -138,6 +148,7 @@ export interface FileRouteTypes {
| '/login' | '/login'
| '/admin/logs' | '/admin/logs'
| '/admin/notifications' | '/admin/notifications'
| '/admin/servers'
| '/admin/settings' | '/admin/settings'
| '/docs/$' | '/docs/$'
| '/docs' | '/docs'
@@ -151,6 +162,7 @@ export interface FileRouteTypes {
| '/(auth)/login' | '/(auth)/login'
| '/admin/logs' | '/admin/logs'
| '/admin/notifications' | '/admin/notifications'
| '/admin/servers'
| '/admin/settings' | '/admin/settings'
| '/docs/$' | '/docs/$'
| '/docs/' | '/docs/'
@@ -165,6 +177,7 @@ export interface RootRouteChildren {
authLoginRoute: typeof authLoginRoute authLoginRoute: typeof authLoginRoute
AdminLogsRoute: typeof AdminLogsRoute AdminLogsRoute: typeof AdminLogsRoute
AdminNotificationsRoute: typeof AdminNotificationsRoute AdminNotificationsRoute: typeof AdminNotificationsRoute
AdminServersRoute: typeof AdminServersRoute
AdminSettingsRoute: typeof AdminSettingsRoute AdminSettingsRoute: typeof AdminSettingsRoute
DocsSplatRoute: typeof DocsSplatRoute DocsSplatRoute: typeof DocsSplatRoute
DocsIndexRoute: typeof DocsIndexRoute DocsIndexRoute: typeof DocsIndexRoute
@@ -210,6 +223,13 @@ declare module '@tanstack/react-router' {
preLoaderRoute: typeof AdminSettingsRouteImport preLoaderRoute: typeof AdminSettingsRouteImport
parentRoute: typeof rootRouteImport parentRoute: typeof rootRouteImport
} }
'/admin/servers': {
id: '/admin/servers'
path: '/admin/servers'
fullPath: '/admin/servers'
preLoaderRoute: typeof AdminServersRouteImport
parentRoute: typeof rootRouteImport
}
'/admin/notifications': { '/admin/notifications': {
id: '/admin/notifications' id: '/admin/notifications'
path: '/admin/notifications' path: '/admin/notifications'
@@ -261,6 +281,7 @@ const rootRouteChildren: RootRouteChildren = {
authLoginRoute: authLoginRoute, authLoginRoute: authLoginRoute,
AdminLogsRoute: AdminLogsRoute, AdminLogsRoute: AdminLogsRoute,
AdminNotificationsRoute: AdminNotificationsRoute, AdminNotificationsRoute: AdminNotificationsRoute,
AdminServersRoute: AdminServersRoute,
AdminSettingsRoute: AdminSettingsRoute, AdminSettingsRoute: AdminSettingsRoute,
DocsSplatRoute: DocsSplatRoute, DocsSplatRoute: DocsSplatRoute,
DocsIndexRoute: DocsIndexRoute, DocsIndexRoute: DocsIndexRoute,

View File

@@ -0,0 +1,245 @@
import { useSuspenseQuery } from "@tanstack/react-query";
import { createFileRoute, redirect } from "@tanstack/react-router";
import { createColumnHelper } from "@tanstack/react-table";
import axios from "axios";
import { format } from "date-fns-tz";
import { CircleFadingArrowUp, Trash } from "lucide-react";
import { Suspense, useState } from "react";
import { toast } from "sonner";
import { Button } from "../../components/ui/button";
import { Spinner } from "../../components/ui/spinner";
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from "../../components/ui/tooltip";
import { useSocketRoom } from "../../hooks/socket.io.hook";
import { authClient } from "../../lib/auth-client";
import { servers } from "../../lib/queries/servers";
import LstTable from "../../lib/tableStuff/LstTable";
import SearchableHeader from "../../lib/tableStuff/SearchableHeader";
import SkellyTable from "../../lib/tableStuff/SkellyTable";
/**
 * Admin "Servers" route definition.
 *
 * `beforeLoad` acts as a route guard:
 *  - unauthenticated users are redirected to "/" with the attempted
 *    location preserved in `search.redirect` so they can be returned
 *    after login;
 *  - authenticated users whose role is not in the allow-list are
 *    redirected to "/" with no return hint.
 */
export const Route = createFileRoute("/admin/servers")({
  beforeLoad: async ({ location }) => {
    const { data: session } = await authClient.getSession();
    // Roles permitted to view this page.
    const allowedRole = ["systemAdmin", "admin"];
    if (!session?.user) {
      // Not logged in: bounce home, remembering where they were headed.
      throw redirect({
        to: "/",
        search: {
          redirect: location.href,
        },
      });
    }
    if (!allowedRole.includes(session.user.role as string)) {
      // Logged in but not an admin — no access.
      throw redirect({
        to: "/",
      });
    }
    // Exposed to the route context for the component / child loaders.
    return { user: session.user };
  },
  component: RouteComponent,
});
/**
 * Action cell for one server row: triggers a remote upgrade via
 * POST /lst/api/admin/build/updateServer and shows a spinner until the
 * backend reports completion.
 *
 * Extracted into a real component so `useState` is no longer called inside
 * a react-table `cell` callback — the original violated the rules of hooks
 * and carried a biome-ignore acknowledging it.
 */
const UpdateCell = ({
  row,
  refetch,
}: {
  // Row typing mirrors the table's `any` row model for now.
  row: any;
  refetch: () => void;
}) => {
  const [activeToggle, setActiveToggle] = useState(false);

  // Start the upgrade; the button stays disabled until the request settles.
  const onToggle = async () => {
    setActiveToggle(true);
    toast.success(
      `${row.name} just started the upgrade monitor logs for errors.`,
    );
    try {
      const res = await axios.post(
        `/lst/api/admin/build/updateServer`,
        {
          server: row.server,
          destination: row.serverLoc,
          token: row.plantToken,
        },
        { withCredentials: true },
      );
      if (res.data.success) {
        toast.success(`${row.name} has completed its upgrade.`);
        refetch();
        setActiveToggle(false);
      }
    } catch (error) {
      setActiveToggle(false);
      console.error(error);
    }
  };

  return (
    <div>
      <div className="flex items-center space-x-2">
        <Button
          variant="ghost"
          disabled={activeToggle}
          onClick={() => onToggle()}
        >
          {activeToggle ? (
            <span>
              <Spinner />
            </span>
          ) : (
            <span>
              <CircleFadingArrowUp />
            </span>
          )}
        </Button>
      </div>
    </div>
  );
};

/**
 * Table of deployed plant servers with an inline "upgrade" action per row.
 * Suspends while the `servers` query loads (see the Suspense boundary in
 * RouteComponent).
 */
const ServerTable = () => {
  const { data, refetch } = useSuspenseQuery(servers());
  const columnHelper = createColumnHelper<any>();

  const columns = [
    columnHelper.accessor("name", {
      header: ({ column }) => (
        <SearchableHeader column={column} title="Name" searchable={true} />
      ),
      filterFn: "includesString",
      cell: (i) => i.getValue(),
    }),
    columnHelper.accessor("greatPlainsPlantCode", {
      header: ({ column }) => (
        <SearchableHeader column={column} title="GP Code" />
      ),
      // Optional chaining: this column is nullable in the DB, so a missing
      // value must not crash the render with "cannot read toUpperCase".
      cell: (i) => <span>{i.getValue()?.toUpperCase()}</span>,
    }),
    columnHelper.accessor("server", {
      header: ({ column }) => (
        <SearchableHeader column={column} title="server" />
      ),
      // Same nullable-column guard as above.
      cell: (i) => <span>{i.getValue()?.toUpperCase()}</span>,
    }),
    columnHelper.accessor("idAddress", {
      header: ({ column }) => (
        <SearchableHeader column={column} title="IP Address" />
      ),
      cell: (i) => <span>{i.getValue()}</span>,
    }),
    columnHelper.accessor("lastUpdated", {
      header: ({ column }) => (
        <SearchableHeader column={column} title="Last Update" />
      ),
      cell: (i) => <span>{format(i.getValue(), "M/d/yyyy HH:mm")}</span>,
    }),
    columnHelper.accessor("buildNumber", {
      header: ({ column }) => (
        <SearchableHeader column={column} title="Build" />
      ),
      cell: (i) => <span>{i.getValue()}</span>,
    }),
    columnHelper.accessor("update", {
      header: ({ column }) => (
        <SearchableHeader column={column} title="Update" searchable={false} />
      ),
      filterFn: "includesString",
      // Delegate to a proper component so hooks are legal (see UpdateCell).
      cell: (i) => <UpdateCell row={i.row.original} refetch={refetch} />,
    }),
  ];

  return <LstTable data={data} columns={columns} />;
};
/**
 * /admin/servers page body.
 *
 * Left: the server table (suspends while loading, SkellyTable fallback).
 * Right: a live build-log table fed over socket.io from the "admin:build"
 * room. "Trigger Build" starts a release build on the backend; "Clear Logs"
 * (and the per-row trash button) clears entries via `clearRoom`.
 */
function RouteComponent() {
  const { data: logs = [], clearRoom } = useSocketRoom<any>("admin:build");
  const columnHelper = createColumnHelper<any>();

  const logColumns = [
    columnHelper.accessor("timestamp", {
      header: ({ column }) => (
        <SearchableHeader column={column} title="Time" searchable={false} />
      ),
      filterFn: "includesString",
      cell: (i) => format(i.getValue(), "M/d/yyyy HH:mm"),
    }),
    columnHelper.accessor("message", {
      header: ({ column }) => (
        <SearchableHeader column={column} title="Message" />
      ),
      // Long messages are truncated in the cell; the tooltip shows the
      // full text.
      cell: (i) => (
        <Tooltip>
          <TooltipTrigger>
            {i.getValue()?.length > 250 ? (
              <span>{i.getValue().slice(0, 250)}...</span>
            ) : (
              <span>{i.getValue()}</span>
            )}
          </TooltipTrigger>
          <TooltipContent>{i.getValue()}</TooltipContent>
        </Tooltip>
      ),
    }),
    columnHelper.accessor("clearLog", {
      header: ({ column }) => (
        <SearchableHeader column={column} title="Clear" />
      ),
      cell: ({ row }) => {
        const x = row.original;
        return (
          <Button
            size="icon"
            variant={"destructive"}
            onClick={() => clearRoom(x.timestamp)}
          >
            <Trash />
          </Button>
        );
      },
    }),
  ];

  /**
   * Ask the backend to start a release build.
   *
   * Bug fix: the original passed `{ withCredentials: true }` as the SECOND
   * argument of axios.post — that is the request body, not the config — so
   * the credentials flag was never applied. It belongs in the third
   * (config) argument.
   */
  const triggerBuild = async () => {
    try {
      const res = await axios.post(`/lst/api/admin/build/release`, null, {
        withCredentials: true,
      });
      if (res.data.success) {
        toast.success(res.data.message);
      } else {
        toast.error(res.data.message);
      }
    } catch (err) {
      console.log(err);
    }
  };

  return (
    <div className="flex flex-col gap-1">
      <div className="flex gap-1 justify-end">
        <Button onClick={triggerBuild}>Trigger Build</Button>
        <Button onClick={() => clearRoom()}>Clear Logs</Button>
      </div>
      <div className="flex gap-1 w-full">
        <div className="w-full">
          <Suspense fallback={<SkellyTable />}>
            <ServerTable />
          </Suspense>
        </div>
        <div className="w-1/2">
          <LstTable data={logs} columns={logColumns} />
        </div>
      </div>
    </div>
  );
}

View File

@@ -40,7 +40,7 @@ export default function Index() {
</Text>, </Text>,
); );
await devDelay(1500); await devDelay(1500);
router.replace("/setup"); router.replace("/scanner");
return; return;
} }

View File

@@ -13,12 +13,14 @@ export default function setup() {
const serverIpFromStore = useAppStore((s) => s.serverIp); const serverIpFromStore = useAppStore((s) => s.serverIp);
const serverPortFromStore = useAppStore((s) => s.serverPort); const serverPortFromStore = useAppStore((s) => s.serverPort);
const scannerIdFromStore = useAppStore((s) => s.scannerId);
const updateAppState = useAppStore((s) => s.updateAppState); const updateAppState = useAppStore((s) => s.updateAppState);
// local form state // local form state
const [serverIp, setLocalServerIp] = useState(serverIpFromStore); const [serverIp, setLocalServerIp] = useState(serverIpFromStore);
const [serverPort, setLocalServerPort] = useState(serverPortFromStore); const [serverPort, setLocalServerPort] = useState(serverPortFromStore);
const [scannerId, setScannerId] = useState(scannerIdFromStore);
const authCheck = () => { const authCheck = () => {
if (pin === "6971") { if (pin === "6971") {
@@ -98,7 +100,7 @@ export default function setup() {
value={serverPort} value={serverPort}
onChangeText={setLocalServerPort} onChangeText={setLocalServerPort}
placeholder="3000" placeholder="3000"
autoCapitalize="characters" //autoCapitalize="characters"
keyboardType="numeric" keyboardType="numeric"
style={{ borderWidth: 1, padding: 10, borderRadius: 8 }} style={{ borderWidth: 1, padding: 10, borderRadius: 8 }}
/> />
@@ -107,8 +109,9 @@ export default function setup() {
<View> <View>
<Text>Scanner ID</Text> <Text>Scanner ID</Text>
<Text style={{ width: 250 }}> <Text style={{ width: 250 }}>
This is needed as you will be redirected to the standard scanner The ID is required to be able to scan. The scanner will be
with no rules except the rules that alplaprod puts in treated as a normal scanner direct to alplaprod. no extra rules
added.
</Text> </Text>
<TextInput <TextInput
value={scannerId} value={scannerId}

View File

@@ -0,0 +1,17 @@
CREATE TABLE "server_data" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"name" text NOT NULL,
"server" text,
"plant_token" text,
"id_address" text,
"great_plains_plantCode" numeric,
"contact_email" text,
"contact_phone" text,
"active" boolean DEFAULT true,
"server_loc" text,
"last_updated" timestamp DEFAULT now(),
"build_number" integer,
"is_upgrading" boolean DEFAULT false
);
--> statement-breakpoint
CREATE UNIQUE INDEX "plant_token" ON "server_data" USING btree ("plant_token");

View File

@@ -0,0 +1 @@
ALTER TABLE "server_data" RENAME COLUMN "great_plains_plantCode" TO "great_plains_plant_code";

View File

@@ -0,0 +1 @@
ALTER TABLE "server_data" ADD CONSTRAINT "server_data_server_unique" UNIQUE("server");

View File

@@ -0,0 +1,2 @@
ALTER TABLE "server_data" DROP CONSTRAINT "server_data_server_unique";--> statement-breakpoint
ALTER TABLE "server_data" ADD CONSTRAINT "server_data_plant_token_unique" UNIQUE("plant_token");

View File

@@ -0,0 +1 @@
ALTER TABLE "server_data" ALTER COLUMN "plant_token" SET NOT NULL;

View File

@@ -0,0 +1,2 @@
DROP INDEX "plant_token";--> statement-breakpoint
ALTER TABLE "server_data" ALTER COLUMN "great_plains_plant_code" SET DATA TYPE text;

View File

@@ -0,0 +1,21 @@
CREATE TABLE "deployment_history" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"server_id" uuid,
"build_number" integer NOT NULL,
"status" text NOT NULL,
"message" text,
"created_at" timestamp DEFAULT now()
);
--> statement-breakpoint
CREATE TABLE "app_stats" (
"id" text PRIMARY KEY DEFAULT 'primary' NOT NULL,
"current_build" integer DEFAULT 1 NOT NULL,
"last_build_at" timestamp,
"last_deploy_at" timestamp,
"building" boolean DEFAULT false NOT NULL,
"updating" boolean DEFAULT false NOT NULL,
"last_updated" timestamp DEFAULT now(),
"meta" jsonb DEFAULT '{}'::jsonb
);
--> statement-breakpoint
DROP TABLE "stats" CASCADE;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -239,6 +239,55 @@
"when": 1776256060808, "when": 1776256060808,
"tag": "0033_elite_adam_warlock", "tag": "0033_elite_adam_warlock",
"breakpoints": true "breakpoints": true
},
{
"idx": 34,
"version": "7",
"when": 1776732155543,
"tag": "0034_groovy_darkhawk",
"breakpoints": true
},
{
"idx": 35,
"version": "7",
"when": 1776733278288,
"tag": "0035_icy_harpoon",
"breakpoints": true
},
{
"idx": 36,
"version": "7",
"when": 1776733364021,
"tag": "0036_easy_magus",
"breakpoints": true
},
{
"idx": 37,
"version": "7",
"when": 1776733842142,
"tag": "0037_glamorous_joseph",
"breakpoints": true
},
{
"idx": 38,
"version": "7",
"when": 1776733879132,
"tag": "0038_special_wildside",
"breakpoints": true
},
{
"idx": 39,
"version": "7",
"when": 1776734237129,
"tag": "0039_special_the_leader",
"breakpoints": true
},
{
"idx": 40,
"version": "7",
"when": 1776770845947,
"tag": "0040_rainy_white_tiger",
"breakpoints": true
} }
] ]
} }

842
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -39,6 +39,7 @@
"@biomejs/biome": "2.4.8", "@biomejs/biome": "2.4.8",
"@commitlint/cli": "^20.5.0", "@commitlint/cli": "^20.5.0",
"@commitlint/config-conventional": "^20.5.0", "@commitlint/config-conventional": "^20.5.0",
"@types/archiver": "^7.0.0",
"@types/cors": "^2.8.19", "@types/cors": "^2.8.19",
"@types/express": "^5.0.6", "@types/express": "^5.0.6",
"@types/morgan": "^1.9.10", "@types/morgan": "^1.9.10",
@@ -66,6 +67,7 @@
"@dotenvx/dotenvx": "^1.57.0", "@dotenvx/dotenvx": "^1.57.0",
"@scalar/express-api-reference": "^0.9.4", "@scalar/express-api-reference": "^0.9.4",
"@socket.io/admin-ui": "^0.5.1", "@socket.io/admin-ui": "^0.5.1",
"archiver": "^7.0.1",
"axios": "^1.13.6", "axios": "^1.13.6",
"better-auth": "^1.5.5", "better-auth": "^1.5.5",
"concurrently": "^9.2.1", "concurrently": "^9.2.1",

150
scripts/updateServer.ps1 Normal file
View File

@@ -0,0 +1,150 @@
# Pushes the latest zipped build to a remote plant server and performs the
# update in place: stop service -> unzip -> npm install -> db migrate ->
# restart service. Invoked by the Node backend (deployApp) with credentials
# passed as plain parameters.
# NOTE(review): $ADM_PASSWORD arrives as plain text on the command line and
# is visible in process listings — consider a credential store. Confirm.
param(
    [string]$Server,
    [string]$Destination,
    [string]$Token,
    [string]$ADM_USER,
    [string]$ADM_PASSWORD,
    [string]$AppDir
)

# Earlier approach: read credentials from a .scriptCreds file in $AppDir.
# Kept for reference; credentials now come in as parameters.
# $credFile = Join-Path $AppDir ".scriptCreds"
# $credData = @{}
# Get-Content $credFile | ForEach-Object {
#     if ($_ -match "=") {
#         $key, $value = $_ -split "=", 2
#         $credData[$key.Trim()] = $value.Trim()
#     }
# }

# Build a PSCredential for the admin account used for the network share
# and the remote Invoke-Command session.
$username = $ADM_USER
$password = $ADM_PASSWORD
$securePass = ConvertTo-SecureString $password -AsPlainText -Force
$credentials = New-Object System.Management.Automation.PSCredential($username, $securePass)

# Copies the current build zip to \\$Server\$Destination and then runs the
# update remotely. Relies on $AppDir and $credentials from script scope.
function Update-Server {
    param (
        [string]$Destination,
        [string]$Server,
        [string]$Token
    )

    # .buildNumber holds the NEXT build number; the most recently zipped
    # build is therefore (file value), computed below as (+1 then -1).
    $buildFile = Join-Path $AppDir ".buildNumber"
    $BuildNumber = 1
    $BuildFolder = Join-Path $AppDir "builds"
    if (Test-Path $BuildFile) {
        $content = Get-Content $BuildFile | Select-Object -First 1
        $num = $content.Trim() -as [int] # safe cast
        if ($num) {
            $BuildNumber = $num + 1
        }
        else {
            $BuildNumber = 1
        }
    }
    # Get The current Build we have zipped up
    # NOTE(review): if .buildNumber is missing or unparsable this resolves
    # to "0" and the copy below will look for LSTV3-0.zip — confirm that a
    # build always exists before this script runs.
    $BuildNumber = ([int]$BuildNumber - 1).ToString()

    # copy the latest build over
    # Drop any stale Z: mapping from a previous (possibly failed) run.
    Write-Host "Forcing the removal of the mapped drive."
    Get-PSDrive -Name "z" -ErrorAction SilentlyContinue | Remove-PSDrive -Force
    try {
        New-PSDrive -Name "z" -PSProvider FileSystem -Root "\\$Server\$Destination" -Credential $credentials
        # Create the update folder if it doesn't exist
        if (-not (Test-Path -Path "\\$Server\$Destination")) {
            New-Item -ItemType Directory -Path "\\$Server\$Destination" -Force
        }
        # Copying files to the server
        Write-Host "Copying files to $($Server)"
        $zipFile = Join-Path $BuildFolder "LSTV3-$BuildNumber.zip"
        Copy-Item -Path $zipFile -Destination "z:\" -Force
        Write-Host "Files copied to $($Server)"
    }
    catch {
        # Copy failure is logged but not fatal here; the remote update below
        # will then fail on the missing zip.
        Write-Host "Error: $_"
    }
    finally {
        # Remove the mapped drive after copying
        if (Get-PSDrive -Name "z" -ErrorAction SilentlyContinue) {
            Write-Host "Removing mapped drive..."
            Remove-PSDrive -Name "z"
        }
    }

    Write-Host "Updating the app to LSTV3-$BuildNumber.zip"
    # do the stop services, unzip, and restart service and pool
    # This scriptblock executes ON the remote server via Invoke-Command, so
    # it only sees the values passed through -ArgumentList.
    $AppUpdate = {
        param ($Server, $Token, $Destination, $BuildFile)
        #convert everything to the server fun
        # Turn the share form (e.g. "d$\app") back into a local path ("d:\app").
        $LocalPath = $Destination -replace '\$', ':'
        $BuildFileLoc = "$LocalPath\$BuildFile"
        Write-Host "Updating the app to $($BuildFile)"
        Write-Host "Stopping the services to do the updates, pkgs and db changes."
        # NOTE(review): "usiow2" hosts a second instance whose service name
        # carries a "_2" suffix — confirm this special case is still valid.
        $app_name = "LSTV3_app$(if ($Token -eq "usiow2") { "_2" })"
        # TODO: add in the iis reset later
        Write-Host "Stopping $($app_name)"
        Stop-Service -DisplayName $app_name -Force
        Start-Sleep -Seconds 1

        Write-Host "Unzipping the folder..."
        try {
            # Expand the archive
            Expand-Archive -Path $BuildFileLoc -DestinationPath $LocalPath -Force
        }
        catch {
            Write-Host "Error: $_"
            exit 1 # Exit with a non-zero code if there's an error
        }
        # Delete the zip file after extraction
        Write-Host "Deleting the zip file..."
        Remove-Item -Path $BuildFileLoc -Force
        Start-Sleep -Seconds 1

        try {
            # do the install/update
            Push-Location $LocalPath
            Write-Host "Running install/update in: $LocalPath"
            npm install --omit=dev
            Start-Sleep -Seconds 3
            Write-Host "Install/update completed."
            # do the migrations
            # Push-Location $LocalPath
            Write-Host "Running migrations"
            npm run dev:db:migrate
            Start-Sleep -Seconds 3
            Write-Host "Migrations Completed."
        }
        catch {
            # Migration/install errors are logged; the service restart below
            # still runs so the app comes back up.
            Write-Host "Migration: $_"
        }
        finally {
            Pop-Location
        }

        Write-Host "Starting $($app_name)"
        Start-Service -DisplayName $app_name -ErrorAction Stop
        Start-Sleep -Seconds 1
        #Write-Host "Update completed on $($Server)-$($Token)"
    }
    # Argument order must match the scriptblock's param() above.
    Invoke-Command -ComputerName $Server -ScriptBlock $AppUpdate -ArgumentList $Server, $Token, $Destination, "LSTV3-$BuildNumber.zip" -Credential $credentials
}

Update-Server -Server $Server -Destination $Destination -Token $Token