feat(labels): add listener for old app to push all labels to the new app

This commit is contained in:
2025-10-17 11:18:31 -05:00
parent 0d1f96333b
commit af079b8306
7 changed files with 2386 additions and 88 deletions

View File

@@ -1,4 +1,7 @@
import { Client } from "pg";
import { db } from "../db/db.js";
import { prodlabels } from "../db/schema/prodLabels.js";
import { tryCatch } from "../utils/tryCatch.js";
import { createLogger } from "./logger.js";
type NewLog = {
@@ -18,13 +21,13 @@ export const v1Listener = async () => {
await client.connect();
// the notify channel to listen for logs on
const channels = ["logs_channel", "users_channel", "orders_channel"];
const channels = ["logs_channel", "label_channel", "orders_channel"];
for (const ch of channels) {
await client.query(`LISTEN ${ch}`);
}
console.log("Listening for:", channels.join(", "));
log.info({ channels: channels }, "Listening for on channels");
// create the log function to be able to mimic what is coming over
const logEvent = (newLog: string) => {
@@ -50,6 +53,20 @@ export const v1Listener = async () => {
);
}
};
const labelEvent = async (newLabel: string) => {
const newLabelEvent: any = JSON.parse(newLabel);
const { data, error } = await tryCatch(
db.insert(prodlabels).values({
printerID: newLabelEvent.printerID,
printerName: newLabelEvent.printerName,
line: newLabelEvent.line,
runningNr: newLabelEvent.runningNr,
status: newLabelEvent.status,
add_user: newLabelEvent.add_user,
}),
);
};
client.on("notification", (msg) => {
// msg.channel tells which channel it came from
// msg.payload is whatever message you sent from the trigger
@@ -57,8 +74,8 @@ export const v1Listener = async () => {
case "logs_channel":
logEvent(msg.payload || "");
break;
case "users_channel":
console.log("👤 User event:", msg.payload);
case "label_channel":
labelEvent(msg.payload || "");
break;
case "orders_channel":
console.log("🛒 Order event:", msg.payload);

View File

@@ -1,12 +0,0 @@
-- Broadcast every newly inserted logs row to listeners on 'logs_channel'.
CREATE OR REPLACE FUNCTION notify_new_log()
RETURNS trigger AS $$
BEGIN
-- row_to_json(NEW) serializes the inserted row; pg_notify payloads must be text.
PERFORM pg_notify('logs_channel', row_to_json(NEW)::text);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Fire once per inserted row so consumers (e.g. the v1 listener) see each log.
CREATE TRIGGER logs_notify_trigger
AFTER INSERT ON logs
FOR EACH ROW
EXECUTE FUNCTION notify_new_log();

View File

@@ -0,0 +1,28 @@
-- Allow individual printers to opt out of the global print delay.
ALTER TABLE "printerData" ADD COLUMN "print_delay_override" boolean DEFAULT false;
--> statement-breakpoint
-- Broadcast every newly inserted logs row to listeners on 'logs_channel'.
CREATE OR REPLACE FUNCTION notify_new_log()
RETURNS trigger AS $$
BEGIN
-- row_to_json(NEW) serializes the inserted row; pg_notify payloads must be text.
PERFORM pg_notify('logs_channel', row_to_json(NEW)::text);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER logs_notify_trigger
AFTER INSERT ON logs
FOR EACH ROW
EXECUTE FUNCTION notify_new_log();
--> statement-breakpoint
-- Broadcast every newly inserted prodlabels row on 'label_channel' so the
-- v1 listener can push labels from the old app into the new app.
CREATE OR REPLACE FUNCTION notify_new_label()
RETURNS trigger AS $$
BEGIN
PERFORM pg_notify('label_channel', row_to_json(NEW)::text);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER label_notify_trigger
AFTER INSERT ON prodlabels
FOR EACH ROW
EXECUTE FUNCTION notify_new_label();

File diff suppressed because it is too large Load Diff

View File

@@ -512,6 +512,13 @@
"when": 1757167736042,
"tag": "0072_round_black_knight",
"breakpoints": true
},
{
"idx": 73,
"version": "7",
"when": 1760708711258,
"tag": "0073_bumpy_dust",
"breakpoints": true
}
]
}

View File

@@ -1,89 +1,88 @@
import axios from "axios";
import { printerData } from "../../../../../database/schema/printers.js";
import { sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { printerData } from "../../../../../database/schema/printers.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { lstAuth } from "../../../../index.js";
import { db } from "../../../../../database/dbclient.js";
import { createLog } from "../../../logger/logger.js";
import { assignedPrinters } from "../../utils/checkAssignments.js";
// Sync the printer list from the prod LST API into the local printerData
// table (upsert keyed on humanReadableId).
// NOTE(review): this span is a rendered diff — the old and new versions of
// the function body are interleaved, so most statements appear twice; the
// second occurrence of each pair is the post-change version.
export const updatePrinters = async () => {
const currentTime = new Date(Date.now());
const currentTime = new Date(Date.now());
// get the printers from prod
let url = await prodEndpointCreation(
"/public/v1.0/Administration/Printers"
);
// get the printers from prod
let url = await prodEndpointCreation("/public/v1.0/Administration/Printers");
// Fetch all printers from the prod API with basic auth.
const { data: prodPrinters, error: prodError } = await tryCatch(
axios.get(url, {
headers: {
Authorization: `Basic ${lstAuth}`,
"Content-Type": "application/json",
},
})
);
const { data: prodPrinters, error: prodError } = await tryCatch(
axios.get(url, {
headers: {
Authorization: `Basic ${lstAuth}`,
"Content-Type": "application/json",
},
}),
);
// Bail out on request failure; the `> 10000` looks like a sanity guard
// against an absurdly large response — TODO confirm intent.
if (prodError || prodPrinters?.data.length > 10000) {
//console.log(prodError);
return {
success: false,
message: "there was an error getting the printers.",
data: prodError,
};
}
if (prodError || prodPrinters?.data.length > 10000) {
//console.log(prodError);
return {
success: false,
message: "there was an error getting the printers.",
data: prodError,
};
}
// do the printer update into our db
const prodPrinterInfo = prodPrinters?.data;
// do the printer update into our db
const prodPrinterInfo = prodPrinters?.data;
// Upsert each printer; on conflict (same humanReadableId) refresh its fields
// and bump upd_date.
for (let i = 0; i < prodPrinterInfo.length; i++) {
const printerStuff: any = {
humanReadableId: prodPrinterInfo[i].humanReadableId,
name: prodPrinterInfo[i].name,
ipAddress: prodPrinterInfo[i].ipAddress,
port: prodPrinterInfo[i].port,
remark: prodPrinterInfo[i].remark,
processes: prodPrinterInfo[i].processes,
};
const { data, error } = await tryCatch(
db
.insert(printerData)
.values(printerStuff)
.onConflictDoUpdate({
target: printerData.humanReadableId,
set: {
//humanReadableId: prodPrinterInfo[i].humanReadableId,
name: prodPrinterInfo[i].name,
ipAddress: prodPrinterInfo[i].ipAddress,
port: prodPrinterInfo[i].port,
remark: prodPrinterInfo[i].remark,
processes: prodPrinterInfo[i].processes,
upd_date: sql`NOW()`,
//printDelay: "90", // need to remove in a couple weeks
},
})
);
for (let i = 0; i < prodPrinterInfo.length; i++) {
const printerStuff: any = {
humanReadableId: prodPrinterInfo[i].humanReadableId,
name: prodPrinterInfo[i].name,
ipAddress: prodPrinterInfo[i].ipAddress,
port: prodPrinterInfo[i].port,
remark: prodPrinterInfo[i].remark,
processes: prodPrinterInfo[i].processes,
};
const { data, error } = await tryCatch(
db
.insert(printerData)
.values(printerStuff)
.onConflictDoUpdate({
target: printerData.humanReadableId,
set: {
//humanReadableId: prodPrinterInfo[i].humanReadableId,
name: prodPrinterInfo[i].name,
ipAddress: prodPrinterInfo[i].ipAddress,
port: prodPrinterInfo[i].port,
remark: prodPrinterInfo[i].remark,
processes: prodPrinterInfo[i].processes,
upd_date: sql`NOW()`,
//printDelay: "90", // need to remove in a couple weeks
},
}),
);
// Per-printer failure logging. The new version adds `continue` so a failed
// printer no longer also emits the success (debug) log line.
if (error) {
createLog(
"error",
"lst",
"ocp",
`${
prodPrinterInfo[i].name
} encoutered and error adding/updating ${JSON.stringify(error)}`
);
}
createLog(
"debug",
"lst",
"ocp",
`${prodPrinterInfo[i].name} were just added/updated.`
);
}
if (error) {
createLog(
"error",
"lst",
"ocp",
`${
prodPrinterInfo[i].name
} encoutered and error adding/updating ${JSON.stringify(error)}`,
);
continue;
}
createLog(
"debug",
"lst",
"ocp",
`${prodPrinterInfo[i].name} were just added/updated.`,
);
}
// The assignedPrinters() call is commented out in the new version —
// TODO confirm whether assignment syncing moved elsewhere.
await assignedPrinters();
//await assignedPrinters();
return { success: true, message: "Printers were just added or updated." };
return { success: true, message: "Printers were just added or updated." };
};

View File

@@ -14,7 +14,7 @@ param (
# example string to pass over, you must be in the script dir when you run this script. or it will fail to find the linked scripts
# If we do not pass plant to update over it will auto do all plants if we want a specific plant we need to do like below
# .\update-controllers.ps1 -App_Path "C:\Users\matthes01\Documents\lst" -Token "usstp1" -BuildController yes -PlantToUpdate "usstp1vms006" -Remote_Path "D$\LST"
# .\update-controllers.ps1 -App_Path "C:\Users\matthes01\Documents\lst" -Token "ushou1" -BuildController yes -PlantToUpdate "ushou1vms006" -Remote_Path "E$\LST"
# .\update-controllers.ps1 -App_Path "C:\Users\matthes01\Documents\lst" -Token "test3" -BuildController yes
$Plants = @(