Compare commits

...

42 Commits

Author SHA1 Message Date
dcfa56bdb9 fix(notify): fixed plant-to-plant notification bug that would cause multiple emails to be sent and never update 2026-03-11 15:30:53 -05:00
ea92422bb1 feat(notification): plant to plant edi 2026-03-10 08:18:29 -05:00
2111a5fdc9 refactor(sql): changes to improve the job disable 2026-02-18 08:53:19 -06:00
6edd20585f refactor(stats): added in mastermacro version 2026-02-18 08:52:29 -06:00
a9759795c4 fix(scripts): changed the fake scanning to be more readable 2026-02-16 19:15:32 -06:00
32f26a1725 fix(quality): hoping to finally find the bug that kills me every night 2026-02-16 19:05:41 -06:00
60533beed5 feat(sql): job disabling scripts 2026-02-16 19:05:08 -06:00
24ced97b6d feat(notification): added cycle count check 2026-02-16 19:04:38 -06:00
dc1d342799 fix(scanner): if host or port not sent over stop the connection right away 2026-02-16 19:04:04 -06:00
44d0cb63cf refactor(sql): moved new queries to their own folder to make it easier to work with and migrate 2026-02-16 19:01:23 -06:00
ace73fa919 refactor(sendmail): updated the smtp per alpla needs 2026-02-16 18:59:12 -06:00
316af4233f refactor(stats): added sheet version check in 2026-02-16 18:58:17 -06:00
36a805c652 refactor(scripts): create finance bol 2026-02-16 09:40:19 -06:00
460bc3d24a feat(query selector): queryselector from file based vs cp to ts filesz 2026-02-16 09:40:00 -06:00
ec201fcfb5 refactor(sql): full changes to localhost if on production server 2026-02-16 09:39:35 -06:00
914ad46c43 refactor(sql server): changes to look at localhost if in production 2026-02-16 09:38:55 -06:00
b96c546ed3 refactor(notify): changed to only allow max 100 errors in the email 2026-02-16 09:38:31 -06:00
29b3be41a1 build(notification): fixed fifo index ts errors 2026-02-16 09:38:10 -06:00
16edf58025 refactor(eom): changes to hist inv 2026-02-16 09:37:40 -06:00
775627f215 feat(scanner): tcp scanner connection based on env var no more db stuff 2026-02-16 09:37:14 -06:00
4e70fae69b refactor(api docs): added/changed docs 2026-02-16 09:36:44 -06:00
24dd109a21 fix(commandlog): changes to the log table 2026-02-13 16:08:23 -06:00
38b57a00cc refactor(datamart): article changes to add pet-g 2026-02-13 16:03:26 -06:00
f8070db95f fix(sqlserver): changed to proper pool connection 2026-02-13 16:02:43 -06:00
10e9dc430c fix(notification): limited to 1000 max errors 2026-02-13 15:59:38 -06:00
6b669ccd9c fix(labelinfo): corrected the query on label info for external 2026-02-13 14:51:15 -06:00
d9a10d98a1 refactor(sendmail): change the send mail function from noreply to donotreply 2026-02-13 14:50:44 -06:00
e64dc7c013 refactor(ocp): removed zechetti 2 from this silly thing for now 2026-02-13 14:50:07 -06:00
d63138d746 helper scripts 2026-02-03 15:40:51 -06:00
84a28f2d01 added relocate 2026-02-03 15:40:41 -06:00
9be6614972 fix(ocp): more material check work 2026-01-19 07:50:27 -06:00
9d0db71f6a fix(datamart): psiPlanning was looking at theoretical, but in a rare case this would look at last year's data
and be missed and not pull correct data; switched to plan end
2026-01-15 13:28:18 -06:00
3cc55436f3 refactor(psi): old planning numbers to revert back to maybe 2026-01-08 20:11:40 -06:00
124fde07e0 refactor(psi): planning numbers refactored to deal with a bad downsync that caused negative numbers 2026-01-08 20:08:31 -06:00
b15d0d7322 refactor(datamart): delivery by date range updates 2026-01-08 20:08:03 -06:00
0680f332fb refactor(manual print): added new option for multiple tags on pallet 2026-01-08 20:07:28 -06:00
46bf310dce refactor(datamart): changed the getDelbyDateRange to the new 2.0 way 2026-01-05 15:17:15 -06:00
0dda6ae744 fix(notifications): fixed a typo in the alert being sent out only showing 1 alert instead of many 2026-01-05 10:27:09 -06:00
1b59cdd3a4 fix(psi): corrections to account for the time offset in the psi 2026-01-02 11:07:45 -06:00
56934216f7 fix(sql): fix connection issues in the sql connection losing it 2025-12-31 07:54:26 -06:00
e8a2ef8b85 refactor(ocp): plc reading changes to disconnect and reconnect
it was found that there were some errors that spammed the log and caused the server to actually stop
responding and crash weirdly so added a disconnect and reconnect back. so we can figure out whats
going on.
2025-12-30 10:55:28 -06:00
6cbffa4ac5 feat(notification): error monitoring
if there are more than 10 errors in a 15min window sends email to alert someone
2025-12-30 10:54:09 -06:00
73 changed files with 4099 additions and 1292 deletions

View File

@@ -49,9 +49,11 @@
"go.formatTool": "goimports", "go.formatTool": "goimports",
"cSpell.words": [ "cSpell.words": [
"acitve", "acitve",
"actaully",
"alpla", "alpla",
"alplamart", "alplamart",
"alplaprod", "alplaprod",
"autoconsume",
"intiallally", "intiallally",
"ppoo", "ppoo",
"prodlabels", "prodlabels",

View File

@@ -0,0 +1,16 @@
meta {
name: Error logging
type: http
seq: 4
}
get {
url: {{urlv2}}/api/notify/toomanyerrors
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,22 @@
meta {
name: sscc
type: http
seq: 4
}
post {
url: {{url}}/lst/old/api/logistics/getsscc
body: json
auth: inherit
}
body:json {
{
"runningNr": ""
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,22 @@
meta {
name: PSI -planning data
type: http
seq: 2
}
get {
url: {{url}}/lst/old/api/datamart/psiplanningdata?avs=118,120&startDate=12/1/2025&endDate=12/31/2026
body: none
auth: inherit
}
params:query {
avs: 118,120
startDate: 12/1/2025
endDate: 12/31/2026
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -5,11 +5,12 @@ meta {
} }
get { get {
url: url: {{url}}/lst/api/logistics/getsscc
body: none body: none
auth: inherit auth: inherit
} }
settings { settings {
encodeUrl: true encodeUrl: true
timeout: 0
} }

View File

@@ -1,6 +1,6 @@
{ {
"version": "1", "version": "1",
"name": "LogisticsSupportTool_API_DOCS", "name": "lstv2",
"type": "collection", "type": "collection",
"ignore": [ "ignore": [
"node_modules", "node_modules",

View File

@@ -1,5 +1,5 @@
vars { vars {
url: https://uslim1prod.alpla.net url: http://localhost:5500
session_cookie: session_cookie:
urlv2: http://usbow1vms006:3000 urlv2: http://usbow1vms006:3000
jwtV2: jwtV2:

View File

@@ -0,0 +1,24 @@
meta {
name: bookout
type: http
seq: 2
}
post {
url: {{url}}/lst/old/api/logistics/bookout
body: json
auth: none
}
body:json {
{
"runningNr": "1865027",
"reason": "packer printed premature"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: logistics
seq: 7
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,24 @@
meta {
name: relocate
type: http
seq: 1
}
post {
url: {{url}}/lst/old/api/logistics/relocate
body: json
auth: inherit
}
body:json {
{
"runningNr": "56121541",
"laneID": "30006"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,24 @@
meta {
name: removeAsWaste
type: http
seq: 3
}
post {
url: {{url}}/lst/old/api/logistics/removeasreusable
body: json
auth: none
}
body:json {
{
"runningNr": "1865018",
"reason": "validating stockout"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -78,7 +78,7 @@ const main = async () => {
// connect to the prod sql // connect to the prod sql
console.log("Connecting to the sql server"); console.log("Connecting to the sql server");
await initializeProdPool();
// express app // express app
const app = express(); const app = express();
@@ -184,7 +184,7 @@ const main = async () => {
// swaggerUi.serve, // swaggerUi.serve,
// swaggerUi.setup(openapiSpec, swaggerUiOptions), // swaggerUi.setup(openapiSpec, swaggerUiOptions),
// ); // );
initializeProdPool();
setupSwagger(app, basePath) setupSwagger(app, basePath)
app.use(basePath + "/d", express.static(join(__dirname, "../lstDocs/build"))); app.use(basePath + "/d", express.static(join(__dirname, "../lstDocs/build")));
app.use( app.use(

View File

@@ -58,6 +58,8 @@ router.get("/", async (req, res) => {
memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${( memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
used.rss / 1024 / 1024 used.rss / 1024 / 1024
).toFixed(2)} MB`, ).toFixed(2)} MB`,
eomFGPkgSheetVersion: 1, // this is the excel file version when we have a change to the macro we want to grab this
masterMacroFile: 1, // this is the excel file version when we have a change to the macro we want to grab this
}); });
}); });

View File

@@ -1,123 +1,125 @@
import type { Address } from "nodemailer/lib/mailer/index.js";
import type { Transporter } from "nodemailer"; import type { Transporter } from "nodemailer";
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
import type Mail from "nodemailer/lib/mailer/index.js";
import os from "os";
import nodemailer from "nodemailer"; import nodemailer from "nodemailer";
import type Mail from "nodemailer/lib/mailer/index.js";
import type { Address } from "nodemailer/lib/mailer/index.js";
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
import hbs from "nodemailer-express-handlebars";
import os from "os";
import path from "path"; import path from "path";
import { fileURLToPath } from "url"; import { fileURLToPath } from "url";
import { promisify } from "util"; import { promisify } from "util";
import hbs from "nodemailer-express-handlebars";
import { createLogger } from "../../logger/logger.js"; import { createLogger } from "../../logger/logger.js";
interface HandlebarsMailOptions extends Mail.Options { interface HandlebarsMailOptions extends Mail.Options {
template: string; template: string;
context: Record<string, unknown>; context: Record<string, unknown>;
} }
interface EmailData { interface EmailData {
email: string; email: string;
subject: string; subject: string;
template: string; template: string;
context: Record<string, unknown>; context: Record<string, unknown>;
} }
export const sendEmail = async (data: EmailData): Promise<any> => { export const sendEmail = async (data: EmailData): Promise<any> => {
const log = createLogger({ module: "pkg", subModule: "sendMail" }); const log = createLogger({ module: "pkg", subModule: "sendMail" });
let transporter: Transporter; let transporter: Transporter;
let fromEmail: string | Address; let fromEmail: string | Address;
if ( // if (
os.hostname().includes("OLP") && // os.hostname().includes("OLP") &&
process.env.EMAIL_USER && // process.env.EMAIL_USER &&
process.env.EMAIL_PASSWORD // process.env.EMAIL_PASSWORD
) { // ) {
transporter = nodemailer.createTransport({ // transporter = nodemailer.createTransport({
service: "gmail", // service: "gmail",
auth: { // auth: {
user: process.env.EMAIL_USER, // user: process.env.EMAIL_USER,
pass: process.env.EMAIL_PASSWORD, // pass: process.env.EMAIL_PASSWORD,
}, // },
//debug: true, // //debug: true,
}); // });
// update the from email // // update the from email
fromEmail = process.env.EMAIL_USER; // fromEmail = process.env.EMAIL_USER;
} else { // } else {
// convert to the correct plant token. // // convert to the correct plant token.
let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`; //let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`;
//const testServers = ["vms036", "VMS036"]; //const testServers = ["vms036", "VMS036"];
if (os.hostname().includes("VMS036")) { // if (os.hostname().includes("VMS036")) {
host = "USMCD1-smtp.alpla.net"; // host = "USMCD1-smtp.alpla.net";
} // }
// if (plantToken[0].value === "usiow2") { // if (plantToken[0].value === "usiow2") {
// host = "USIOW1-smtp.alpla.net"; // host = "USIOW1-smtp.alpla.net";
// } // }
transporter = nodemailer.createTransport({ transporter = nodemailer.createTransport({
host: host, host: "smtp.azurecomm.net",
port: 25, port: 587,
rejectUnauthorized: false, //rejectUnauthorized: false,
//secure: false, tls: {
// auth: { minVersion: "TLSv1.2",
// user: "alplaprod", },
// pass: "obelix", auth: {
// }, user: "donotreply@mail.alpla.com",
debug: true, pass: process.env.SMTP_PASSWORD,
} as SMTPTransport.Options); },
debug: true,
} as SMTPTransport.Options);
// update the from email // update the from email
fromEmail = `noreply@alpla.com`; fromEmail = `DoNotReply@mail.alpla.com`;
} //}
// creating the handlbar options // creating the handlbar options
const viewPath = path.resolve( const viewPath = path.resolve(
path.dirname(fileURLToPath(import.meta.url)), path.dirname(fileURLToPath(import.meta.url)),
"./views/" "./views/",
); );
const handlebarOptions = { const handlebarOptions = {
viewEngine: { viewEngine: {
extname: ".hbs", extname: ".hbs",
//layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory //layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory
defaultLayout: "", // Specify the default layout defaultLayout: "", // Specify the default layout
partialsDir: viewPath, partialsDir: viewPath,
}, },
viewPath: viewPath, viewPath: viewPath,
extName: ".hbs", // File extension for Handlebars templates extName: ".hbs", // File extension for Handlebars templates
}; };
transporter.use("compile", hbs(handlebarOptions)); transporter.use("compile", hbs(handlebarOptions));
const mailOptions: HandlebarsMailOptions = { const mailOptions: HandlebarsMailOptions = {
from: fromEmail, from: fromEmail,
to: data.email, to: data.email,
subject: data.subject, subject: data.subject,
//text: "You will have a reset token here and only have 30min to click the link before it expires.", //text: "You will have a reset token here and only have 30min to click the link before it expires.",
//html: emailTemplate("BlakesTest", "This is an example with css"), //html: emailTemplate("BlakesTest", "This is an example with css"),
template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs') template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs')
context: data.context, context: data.context,
}; };
// now verify and send the email // now verify and send the email
const sendMailPromise = promisify(transporter.sendMail).bind(transporter); const sendMailPromise = promisify(transporter.sendMail).bind(transporter);
try { try {
// Send email and await the result // Send email and await the result
const info = await sendMailPromise(mailOptions); const info = await sendMailPromise(mailOptions);
log.info(null, `Email was sent to: ${data.email}`); log.info(null, `Email was sent to: ${data.email}`);
return { success: true, message: "Email sent.", data: info }; return { success: true, message: "Email sent.", data: info };
} catch (err) { } catch (err) {
console.log(err); console.log(err);
log.error( log.error(
{ error: err }, { error: err },
`Error sending Email to : ${data.email}` `Error sending Email to : ${data.email}`,
); );
return { success: false, message: "Error sending email.", error: err }; return { success: false, message: "Error sending email.", error: err };
} }
}; };

View File

@@ -12,14 +12,15 @@ import { LstCard } from "../../../extendedUi/LstCard";
export default function Relocate() { export default function Relocate() {
const [bookingIn, setBookingIn] = useState(false); const [bookingIn, setBookingIn] = useState(false);
const form = useForm({ const form = useForm({
defaultValues: { runningNr: " ", lane: "" }, defaultValues: { runningNr: " ", laneID: "" },
onSubmit: async ({ value }) => { onSubmit: async ({ value }) => {
// Do something with form data // Do something with form data
setBookingIn(true); setBookingIn(true);
try { try {
const res = await axios.post("/lst/old/api/ocp/bookin", { const res = await axios.post("/lst/old/api/logistics/relocate", {
runningNr: parseInt(value.runningNr), runningNr: parseInt(value.runningNr),
laneID: parseInt(value.laneID),
}); });
if (res.data.success) { if (res.data.success) {
@@ -27,15 +28,15 @@ export default function Relocate() {
form.reset(); form.reset();
setBookingIn(false); setBookingIn(false);
} else { } else {
console.log(res.data.data.errors); console.log(res.data.message);
toast.error(res.data.data.errors[0]?.message); toast.error(res.data.message);
form.reset(); //form.reset();
setBookingIn(false); setBookingIn(false);
} }
} catch (error) { } catch (error) {
console.log(error); console.log(error);
toast.error( toast.error(
"There was an error booking in pallet please validate you entered the correct info and try again.", "There was an error relocating the pallet please validate the data.",
); );
setBookingIn(false); setBookingIn(false);
} }
@@ -58,7 +59,7 @@ export default function Relocate() {
validators={{ validators={{
// We can choose between form-wide and field-specific validators // We can choose between form-wide and field-specific validators
onChange: ({ value }) => onChange: ({ value }) =>
value.length > 2 value.length > 0
? undefined ? undefined
: "Please enter a valid running number", : "Please enter a valid running number",
}} }}
@@ -83,19 +84,17 @@ export default function Relocate() {
}} }}
/> />
<form.Field <form.Field
name="lane" name="laneID"
validators={{ validators={{
// We can choose between form-wide and field-specific validators // We can choose between form-wide and field-specific validators
onChange: ({ value }) => onChange: ({ value }) =>
value.length > 2 value.length > 0 ? undefined : "Please enter a valid lane ID",
? undefined
: "Please enter a valid running number",
}} }}
children={(field) => { children={(field) => {
return ( return (
<div className=""> <div className="">
<Label htmlFor="runningNr" className="mb-2"> <Label htmlFor="laneID" className="mb-2">
Enter lane Enter lane ID
</Label> </Label>
<Input <Input
name={field.name} name={field.name}

View File

@@ -7,14 +7,18 @@ export default function HelperPage() {
return ( return (
<div className="flex flex-wrap m-2 justify-center"> <div className="flex flex-wrap m-2 justify-center">
<div className="m-1"> <div className="m-1">
<Bookin /> <div className="m-1 ">
<Bookin />
</div>
<div className="w-96 m-1">
<Relocate />
</div>
</div> </div>
<div className="m-1"> <div className="m-1">
{url === "localhost" && ( {url === "localhost" && (
<div className="m-1"> <div className="m-1">
<RemoveAsNonReusable /> <RemoveAsNonReusable />
<Relocate />
</div> </div>
)} )}
</div> </div>

View File

@@ -30,6 +30,7 @@ import { useSettingStore } from "../../../-lib/store/useSettings";
const printReason = [ const printReason = [
{ key: "printerIssue", label: "Printer Related" }, { key: "printerIssue", label: "Printer Related" },
{ key: "missingRfidTag", label: "Missing or incorrect tag" }, { key: "missingRfidTag", label: "Missing or incorrect tag" },
{ key: "multipleTags", label: "More than one tag on pallet." },
{ key: "rfidMissScan", label: "Missed Scan from RFID reader" }, { key: "rfidMissScan", label: "Missed Scan from RFID reader" },
{ key: "strapper", label: "Strapper Error" }, { key: "strapper", label: "Strapper Error" },
{ key: "manualCheck", label: "20th pallet check" }, { key: "manualCheck", label: "20th pallet check" },

View File

@@ -1,20 +1,20 @@
import { text, pgTable, timestamp, uuid, jsonb } from "drizzle-orm/pg-core"; import { jsonb, pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod"; import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import { z } from "zod"; import { z } from "zod";
export const commandLog = pgTable( export const commandLog = pgTable(
"commandLog", "commandLog",
{ {
commandLog_id: uuid("commandLog_id").defaultRandom().primaryKey(), commandLog_id: uuid("commandLog_id").defaultRandom().primaryKey(),
commandUsed: text("commandUsed").notNull(), commandUsed: text("commandUsed").notNull(),
bodySent: jsonb("bodySent").default([]), bodySent: jsonb("bodySent").default([]),
reasonUsed: text("reasonUsed"), reasonUsed: text("reasonUsed"),
add_at: timestamp("add_Date").defaultNow(), addDate: timestamp("add_Date").defaultNow(),
}, },
(table) => [ (table) => [
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`), // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
// uniqueIndex("role_name").on(table.name), // uniqueIndex("role_name").on(table.name),
] ],
); );
// Schema for inserting a user - can be used to validate API requests // Schema for inserting a user - can be used to validate API requests

View File

@@ -10,7 +10,8 @@
"dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts", "dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts",
"dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts", "dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts",
"build": "npm run build:server", "build": "npm run build:server",
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y ", "build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y && npm run build:copySql",
"build:copySql": "xcopy server\\services\\sqlServer\\querys\\newQueries dist\\server\\services\\sqlServer\\querys\\newQueries\\ /E /I /Y ",
"build:frontend": "cd frontend && npm run build", "build:frontend": "cd frontend && npm run build",
"build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild", "build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild",
"copy:scripts": "tsx server/scripts/copyScripts.ts", "copy:scripts": "tsx server/scripts/copyScripts.ts",

View File

@@ -0,0 +1,191 @@
/**
* Using this to make a scanner connection to the server.
*/
import net from "net";
// A pending scanner command together with the promise callbacks that settle
// its caller and the safety timeout that bounds the wait for a response.
interface QueuedCommand {
  command: string;
  resolve: (value: string) => void;
  reject: (reason?: any) => void;
  timeout: NodeJS.Timeout;
}

// ASCII control characters used to frame every scanner message: STX ... ETX.
const STX = "\x02";
const ETX = "\x03";

// const prodIP = process.env.SERVER_IP as string;
// const prodPort = parseInt(process.env.SCANNER_PORT || "50000", 10);
// const scannerID = `${process.env.SCANNER_ID}@`;
//const scannerCommand = "AlplaPRODcmd00000042#000028547"; // top of the picksheet
/**
 * TCP client for a barcode scanner. Each command is framed as
 * STX + scannerId + command + ETX and commands are serialized strictly
 * one-at-a-time (FIFO): a new command is only written to the socket once the
 * previous one has received a response or timed out.
 */
export class ScannerClient {
  private socket = new net.Socket();
  // Tracks whether the TCP connection is currently open.
  private connected = false;
  // Pending commands, oldest first; queue[0] is the one on the wire.
  private queue: QueuedCommand[] = [];
  // True while a command is in flight and we are awaiting its response.
  private processing = false;
  // NOTE(review): declared but never written to anywhere in this class —
  // responses are resolved directly from each "data" event. Confirm whether
  // partial STX/ETX frames can arrive; if so, buffering is still TODO.
  private incomingBuffer = "";

  constructor(
    private host: string,
    private port: number,
    private scannerId: string,
  ) {
    this.initialize();
  }

  // Opens the TCP connection and wires up the socket event handlers.
  // Bails out early (no connection attempt) when host or port is missing,
  // so `connected` stays false and every scan() call rejects.
  private initialize() {
    if (!this.host || !this.port) {
      console.log("Host or port is missing");
      return;
    }
    this.socket.connect(this.port, this.host, () => {
      console.info("Connected to scanner");
      this.connected = true;
    });
    this.socket.on("data", (data) => this.handleData(data));
    this.socket.on("close", () => {
      // NOTE(review): no reconnect here — once the socket closes, all
      // subsequent scan() calls reject until the process restarts. Confirm
      // that is the intended failure mode.
      console.log("Scanner connection closed");
      this.connected = false;
    });
    this.socket.on("error", (err) => {
      console.error("Scanner error:", err);
    });
  }

  // ✅ Public method you use
  /**
   * Queues a command for the scanner and resolves with the raw ASCII
   * response. Rejects immediately when not connected, or after the 5s
   * safety timeout if no response arrives.
   *
   * NOTE(review): on timeout the entry is rejected but NOT removed from
   * `queue`, so the next processQueue() re-sends the same (already rejected)
   * command and a late response would settle the wrong promise — verify
   * whether the timed-out entry should be shifted off the queue here.
   */
  public scan(command: string): Promise<string> {
    if (!this.connected) {
      return Promise.reject("Scanner not connected");
    }
    return new Promise((resolve, reject) => {
      const timeout = setTimeout(() => {
        this.processing = false;
        reject("Scanner timeout");
        this.processQueue();
      }, 5000); // 5s safety timeout
      this.queue.push({
        command,
        resolve,
        reject,
        timeout,
      });
      this.processQueue();
    });
  }

  // ✅ Ensures strict FIFO processing
  // Writes the head of the queue to the socket; no-op when a command is
  // already in flight or the queue is empty. The entry is left at queue[0]
  // until its response arrives (handleData shifts it off).
  private processQueue() {
    if (this.processing) return;
    if (this.queue.length === 0) return;
    this.processing = true;
    const current = this.queue[0];
    const message = Buffer.from(
      `${STX}${this.scannerId}${current.command}${ETX}`,
      "ascii",
    );
    this.socket.write(message);
  }

  // ✅ Handles full STX/ETX framed responses
  // Resolves the oldest queued command with the raw ASCII payload, then
  // kicks the queue again. The logged copy is cleaned (null bytes / ANSI
  // codes stripped) but the resolved value is the unmodified ASCII string.
  private handleData(data: Buffer) {
    console.log(
      "ASCII:",
      data
        .toString("ascii")
        .replace(/\x00/g, "") // remove null bytes
        .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
        .trim(),
    );
    const current = this.queue.shift();
    if (current) {
      clearTimeout(current.timeout);
      current.resolve(data.toString("ascii"));
    }
    this.processing = false;
    this.processQueue();
  }
}
// Singleton scanner connection, configured entirely from environment
// variables (no DB lookup anymore). NOTE(review): the non-null assertions (!)
// mean missing SERVER_IP/SCANNER_PORT produce undefined/NaN at runtime; the
// constructor's host/port guard then skips connecting — confirm that silent
// no-connect is the intended behavior.
export const scanner = new ScannerClient(
  process.env.SERVER_IP!,
  parseInt(process.env.SCANNER_PORT!, 10),
  `${process.env.SCANNER_ID}@`,
);
// export const connectToScanner = () => {
// if (!process.env.SERVER_IP || !process.env.SCANNER_PORT) {
// return {
// success: false,
// message: "Missing ServerIP or ServerPort",
// };
// }
// scanner.connect(prodPort, prodIP, () => {
// console.log("Connected to scanner");
// connected = true;
// });
// };
// export const scan = async (command: string) => {
// if (!connected) {
// return {
// success: false,
// message: "Scanner is not connected, please contact admin",
// };
// }
// if (inScanCommand) {
// bufferCommands.push({ timeStamp: new Date(Date.now()), command: command });
// }
// // we are going to set to scanning
// inScanCommand = true;
// const message = Buffer.from(`${STX}${scannerID}${command}${ETX}`, "ascii");
// scanner.write(message);
// await new Promise((resolve) => setTimeout(resolve, 750));
// inScanCommand = false;
// if (bufferCommands.length > 0) {
// await scan(bufferCommands[0].command);
// bufferCommands.shift();
// }
// return {
// success: true,
// message: "Scan completed",
// };
// };
// scanner.on("data", async (data) => {
// console.log(
// "Response:",
// data
// .toString("ascii")
// .replace(/\x00/g, "") // remove null bytes
// .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
// .trim(),
// );
// });
// scanner.on("close", () => {
// console.log("Connection closed");
// });
// scanner.on("error", (err) => {
// console.error("Scanner error:", err);
// });

View File

@@ -1,84 +1,95 @@
import { addDays, format } from "date-fns";
import { formatInTimeZone } from "date-fns-tz";
import { eq } from "drizzle-orm"; import { eq } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js"; import { db } from "../../../../database/dbclient.js";
import { settings } from "../../../../database/schema/settings.js"; import { settings } from "../../../../database/schema/settings.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js"; import { query } from "../../sqlServer/prodSqlServer.js";
import { deliveryByDateRange } from "../../sqlServer/querys/dataMart/deleveryByDateRange.js"; import { deliveryByDateRange } from "../../sqlServer/querys/dataMart/deleveryByDateRange.js";
import { addDays, format } from "date-fns";
export const getDeliveryByDateRange = async (data: any | null) => { export const getDeliveryByDateRange = async (data: any | null) => {
// const { data: plantToken, error: plantError } = await tryCatch( // const { data: plantToken, error: plantError } = await tryCatch(
// db.select().from(settings).where(eq(settings.name, "plantToken")) // db.select().from(settings).where(eq(settings.name, "plantToken"))
// ); // );
// if (plantError) { // if (plantError) {
// return { // return {
// success: false, // success: false,
// message: "Error getting Settings", // message: "Error getting Settings",
// data: plantError, // data: plantError,
// }; // };
// } // }
let deliverys: any = []; let deliverys: any = [];
let updatedQuery = deliveryByDateRange; let updatedQuery = deliveryByDateRange;
// start days can be sent over // start days can be sent over
if (data?.start) { if (data?.start) {
updatedQuery = updatedQuery.replaceAll("[startDate]", data.start[0]); updatedQuery = updatedQuery.replaceAll("[startDate]", data.start[0]);
} else { } else {
updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1"); updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1");
} }
// end days can be sent over // end days can be sent over
if (data?.end) { if (data?.end) {
updatedQuery = updatedQuery.replaceAll("[endDate]", data.end[0]); updatedQuery = updatedQuery.replaceAll("[endDate]", data.end[0]);
} else { } else {
const defaultEndDate = format( const defaultEndDate = format(addDays(new Date(Date.now()), 5), "yyyy-M-d");
addDays(new Date(Date.now()), 5), updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
"yyyy-M-d" }
);
updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
}
try { try {
const res: any = await query( const res: any = await query(updatedQuery, "Get Delivery by date range");
updatedQuery, deliverys = res.data;
"Get Delivery by date range" //console.log(res.data);
); } catch (error) {
deliverys = res.data; console.log(error);
//console.log(res.data); return {
} catch (error) { success: false,
console.log(error); message: "All Deliveries within the range.",
return { data: error,
success: false, };
message: "All Deliveries within the range.", }
data: error,
};
}
if (!data) { // if (!data) {
deliverys = deliverys.splice(1000, 0); // deliverys = deliverys.splice(1000, 0);
} // }
// add plant token in // add plant token in
// const pOrders = deliverys.map((item: any) => { // const pOrders = deliverys.map((item: any) => {
// // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", { // // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", {
// // month: "numeric", // // month: "numeric",
// // day: "numeric", // // day: "numeric",
// // year: "numeric", // // year: "numeric",
// // hour: "2-digit", // // hour: "2-digit",
// // minute: "2-digit", // // minute: "2-digit",
// // hour12: false, // // hour12: false,
// // }); // // });
// //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0]; // //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0];
// const dateCon = new Date(item.loadingDate).toISOString().split("T")[0]; // const dateCon = new Date(item.loadingDate).toISOString().split("T")[0];
// //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0]; // //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0];
// const delDate = new Date(item.deliveryDate).toISOString().split("T")[0]; // const delDate = new Date(item.deliveryDate).toISOString().split("T")[0];
// return { // return {
// plantToken: plantToken[0].value, // plantToken: plantToken[0].value,
// ...item, // ...item,
// loadingDate: dateCon, // loadingDate: dateCon,
// deliveryDate: delDate, // deliveryDate: delDate,
// }; // };
// }); // });
return { success: true, message: "Current open orders", data: deliverys }; return {
success: true,
message: "Current open orders",
data: deliverys.map((i: any) => {
const orderDate = new Date(i.OrderDate);
const delDate = new Date(i.DeliveryDate);
const loadDate = new Date(i.LoadingDate);
return {
...i,
OrderDate: format(orderDate, "yyyy-MM-dd HH:mm"),
DeliveryDate: format(delDate, "yyyy-MM-dd HH:mm"),
LoadingDate: format(loadDate, "yyyy-MM-dd HH:mm"),
dbDate: i.DeliveryDate,
};
}),
};
}; };

View File

@@ -1,84 +1,97 @@
import { addDays, format } from "date-fns";
import { query } from "../../sqlServer/prodSqlServer.js"; import { query } from "../../sqlServer/prodSqlServer.js";
import { deliveryByDateRangeAndAv } from "../../sqlServer/querys/dataMart/deleveryByDateRange.js"; import { deliveryByDateRangeAndAv } from "../../sqlServer/querys/dataMart/deleveryByDateRange.js";
import { addDays, format } from "date-fns";
export const getDeliveryByDateRangeAndAv = async ( export const getDeliveryByDateRangeAndAv = async (
avs: string, avs: string,
startDate: string, startDate: string,
endDate: string endDate: string,
) => { ) => {
// const { data: plantToken, error: plantError } = await tryCatch( // const { data: plantToken, error: plantError } = await tryCatch(
// db.select().from(settings).where(eq(settings.name, "plantToken")) // db.select().from(settings).where(eq(settings.name, "plantToken"))
// ); // );
// if (plantError) { // if (plantError) {
// return { // return {
// success: false, // success: false,
// message: "Error getting Settings", // message: "Error getting Settings",
// data: plantError, // data: plantError,
// }; // };
// } // }
let deliverys: any = []; let deliverys: any = [];
let updatedQuery = deliveryByDateRangeAndAv; let updatedQuery = deliveryByDateRangeAndAv;
// start days can be sent over // start days can be sent over
if (startDate) { if (startDate) {
updatedQuery = updatedQuery.replaceAll("[startDate]", startDate); updatedQuery = updatedQuery.replaceAll("[startDate]", startDate);
} else { } else {
updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1"); updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1");
} }
// end days can be sent over // end days can be sent over
if (endDate) { if (endDate) {
updatedQuery = updatedQuery.replaceAll("[endDate]", endDate); updatedQuery = updatedQuery.replaceAll("[endDate]", endDate);
} else { } else {
const defaultEndDate = format( const defaultEndDate = format(addDays(new Date(Date.now()), 5), "yyyy-M-d");
addDays(new Date(Date.now()), 5), updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
"yyyy-M-d" }
);
updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
}
try { try {
const res: any = await query( const res: any = await query(
updatedQuery.replace("[articles]", avs), updatedQuery.replace("[articles]", avs),
"Get Delivery by date range" "Get Delivery by date range",
); );
deliverys = res.data; deliverys = res.data;
//console.log(res.data); //console.log(res.data);
} catch (error) { } catch (error) {
console.log(error); console.log(error);
return { return {
success: false, success: false,
message: "All Deliveries within the range.", message: "All Deliveries within the range.",
data: error, data: error,
}; };
} }
// if (!data) { // if (!data) {
// deliverys = deliverys.splice(1000, 0); // deliverys = deliverys.splice(1000, 0);
// } // }
// add plant token in // add plant token in
// const pOrders = deliverys.map((item: any) => { // const pOrders = deliverys.map((item: any) => {
// // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", { // // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", {
// // month: "numeric", // // month: "numeric",
// // day: "numeric", // // day: "numeric",
// // year: "numeric", // // year: "numeric",
// // hour: "2-digit", // // hour: "2-digit",
// // minute: "2-digit", // // minute: "2-digit",
// // hour12: false, // // hour12: false,
// // }); // // });
// //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0]; // //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0];
// const dateCon = new Date(item.loadingDate).toISOString().split("T")[0]; // const dateCon = new Date(item.loadingDate).toISOString().split("T")[0];
// //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0]; // //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0];
// const delDate = new Date(item.deliveryDate).toISOString().split("T")[0]; // const delDate = new Date(item.deliveryDate).toISOString().split("T")[0];
// return { // return {
// plantToken: plantToken[0].value, // plantToken: plantToken[0].value,
// ...item, // ...item,
// loadingDate: dateCon, // loadingDate: dateCon,
// deliveryDate: delDate, // deliveryDate: delDate,
// }; // };
// }); // });
return { success: true, message: "Current open orders", data: deliverys }; return {
success: true,
message: "Current open orders",
data: deliverys.map((i: any) => {
const orderDate = new Date(i.OrderDate);
const delDate = new Date(i.DeliveryDate);
const loadDate = new Date(i.LoadingDate);
return {
...i,
OrderDate: format(orderDate, "yyyy-MM-dd HH:mm"),
DeliveryDate: format(delDate, "yyyy-MM-dd HH:mm"),
LoadingDate: format(loadDate, "yyyy-MM-dd HH:mm"),
dbDate: i.DeliveryDate,
};
}),
};
}; };

View File

@@ -1,3 +1,4 @@
import { format } from "date-fns-tz/format";
import { tryCatch } from "../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js"; import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js"; import { query } from "../../sqlServer/prodSqlServer.js";
@@ -37,6 +38,15 @@ export const getGetPSIForecastData = async (customer: string) => {
return { return {
success: true, success: true,
message: "PSI forecast Data", message: "PSI forecast Data",
data: articles, data: articles.map((i: any) => {
const requirementDate = new Date(i.requirementDate);
return {
...i,
requirementDate: format(requirementDate, "yyyy-MM-dd"),
dbDate: i.requirementDate,
};
}),
}; };
}; };

View File

@@ -2,62 +2,72 @@ import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js"; import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js"; import { query } from "../../sqlServer/prodSqlServer.js";
import { planningNumbersByAVDate } from "../../sqlServer/querys/psiReport/planningNumbersByAv.js"; import { planningNumbersByAVDate } from "../../sqlServer/querys/psiReport/planningNumbersByAv.js";
import { improvedPsiPlanningInfo } from "./psiPlanningDataImproved.js";
// type ArticleData = { // type ArticleData = {
// id: string // id: string
// } // }
export const psiGetPlanningData = async ( export const psiGetPlanningData = async (
avs: string, avs: string,
startDate: string, startDate: string,
endDate: string endDate: string,
) => { ) => {
let articles: any = []; let articles: any = [];
if (!avs) { if (!avs) {
return { return {
success: false, success: false,
message: `Missing av's please send at least one over`, message: `Missing av's please send at least one over`,
data: [], data: [],
}; };
} }
const { data, error } = (await tryCatch( const { data, error } = (await tryCatch(
query( query(
planningNumbersByAVDate planningNumbersByAVDate
.replace("[articles]", avs) .replace("[articles]", avs)
.replace("[startDate]", startDate) .replace("[startDate]", startDate)
.replace("[endDate]", endDate), .replace("[endDate]", endDate),
"PSI planning info" "PSI planning info",
) ),
)) as any; )) as any;
if (error) { // improvedPsiPlanningInfo({
createLog( // avs,
"error", // startDate,
"datamart", // endDate,
"datamart", // });
`There was an error getting the planning info: ${JSON.stringify( if (error) {
error createLog(
)}` "error",
); "datamart",
return { "datamart",
success: false, `There was an error getting the planning info: ${JSON.stringify(error)}`,
messsage: `There was an error getting the planning info`, );
data: error, return {
}; success: false,
} messsage: `There was an error getting the planning info`,
data: error,
};
}
articles = data.data; // TODO: if we are not running planning we no pass the old structure if we are running new planning use the below improved version that makes sure we dont have negative numebrs.
articles = data.data;
return { return {
success: true, success: true,
message: "PSI planning Data", message: "PSI planning Data",
data: articles.map((n: any) => { data: await improvedPsiPlanningInfo({
if (n.PalDay) { avs,
return { ...n, PalDay: n.PalDay.toFixed(2) }; startDate,
} endDate,
}),
// data: articles.map((n: any) => {
// if (n.PalDay) {
// return { ...n, PalDay: n.PalDay.toFixed(2) };
// }
return n; // return n;
}), // }),
}; };
}; };

View File

@@ -0,0 +1,171 @@
import { format } from "date-fns-tz";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
// SQL for the improved PSI planning pull: every *published* production lot
// (PublishState = 1) for the requested articles whose PlanEnd falls inside
// the [startDate, endDate] window, ordered by PlanStart.
// The [startDate] / [endDate] / [articles] tokens are string-replaced by
// improvedPsiPlanningInfo before the query is executed.
const improvedQuery = `
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
SELECT
[RunningNumber] as lot
,[ProfitCentreDescription]
,[MachineDescription]
,[ArticleHumanReadableId]
,[ArticleDescription]
,[DeliveryAddressHumanReadableId]
,[DeliveryAddressDescription]
,[MouldHumanReadableId]
,[BlowheadHumanReadableId1]
,[PackagingInstructionHumanReadableId]
,[PackagingInstructionDescription]
,[MainMaterialHumanReadableId]
,[MainMaterialDescription]
,[CompoundHumanReadableId]
,[CompoundDescription]
,[ProductionLotState]
,[PlanType]
,[ProducedQuantityLoadingUnit]
,[ProducedQuantityPieces]
,[PlanStart]
,[PlanEnd]
,[ProdStart]
,[TheoreticEnd]
,[ProdDuration]
,[SetupDuration]
,[StartupDuration]
,[NetEquipmentEfficiency]
,[UtilisationDuration]
,[CycleTime]
,[Cavities]
,[FixedQuantity]
,[ProducedQuantityTrucks]
,[ProducedQuantityTradeUnit]
,[MaxRegrind]
,[Conflict]
,[ProductionOrderHumanReadableId]
,[ProductionDataImportSource]
,[Remark]
,[BlowheadDescription1]
,[MouldDescription]
,[ProcessLossPercentage]
,[SetupTypeNumberOfPersons]
,[UnplannedDowntimePercentage]
,[PlanQuantityLoadingUnit]
,[PlanQuantityPieces]
,[PlanQuantityTradeUnit]
,[PlanQuantityTrucks]
,[PublishState]
,[LastChange]
,[MaterialConsumed]
,[MaterialStaged]
,[MachineLocation]
,[HasPrioritization]
,[ArticleAlias]
FROM [test1_AlplaPROD2.0_Read].[productionScheduling].[ProductionLot] with (nolock)
where PlanEnd between @StartDate and @EndDate
and ArticleHumanReadableId in ([articles])
and PublishState = 1
order by PlanStart
`;
/**
 * Fetch published planning lots for the given articles and date range and
 * split them into per-production-day quantity rows.
 *
 * @param params - { avs, startDate, endDate } used to fill the query tokens
 * @returns array of per-day planning rows; empty array if the query fails
 */
export const improvedPsiPlanningInfo = async (params: any) => {
  const { data, error } = (await tryCatch(
    query(
      improvedQuery
        .replace("[articles]", params.avs)
        .replace("[startDate]", params.startDate)
        .replace("[endDate]", params.endDate),
      "PSI planning info",
    ),
  )) as any;
  // Fail soft: callers feed this straight into an API payload, so a query
  // failure returns an empty day list instead of crashing on data.data.
  if (error || !data?.data) {
    console.log("improvedPsiPlanningInfo query failed:", error);
    return [];
  }
  return splitProduction(data.data);
};
/**
 * Split each planned production run into one row per production day.
 *
 * A production day runs 07:00 -> 07:00 (WORKDAY_START_HOUR); each run's
 * planned quantity is apportioned to the days it overlaps, proportional to
 * the overlap duration versus the run's total production duration.
 *
 * @param runs - rows from improvedQuery (PlanStart/PlanEnd/quantities, etc.)
 * @returns per-day rows with QTYPerDay / PalDay allocations
 */
const splitProduction = (runs: any) => {
  const results: any = [];
  const WORKDAY_START_HOUR = 7; // 07:00 start, later get this from the shift def

  // Work-day bucket a timestamp belongs to: anything before 07:00 counts
  // toward the previous calendar day. Hoisted out of the loop so it is not
  // re-created per run.
  const getProdDayStart = (date: Date): Date => {
    const dayStart = new Date(date);
    dayStart.setHours(WORKDAY_START_HOUR, 0, 0, 0);
    if (date.getHours() < WORKDAY_START_HOUR) {
      dayStart.setDate(dayStart.getDate() - 1);
    }
    return dayStart;
  };

  runs.forEach((run: any) => {
    const {
      PlanStart,
      PlanEnd,
      PlanQuantityPieces,
      ArticleHumanReadableId,
      ProdDuration,
    } = run;
    const prodStart = new Date(PlanStart);
    const prodEnd = new Date(PlanEnd);
    // ProdDuration is assumed to be hours — TODO confirm against the view;
    // fall back to the plan window length when it is missing/zero.
    const prodDuration = ProdDuration
      ? ProdDuration * 60 * 60 * 1000
      : prodEnd.getTime() - prodStart.getTime();
    // Guard: bad data (PlanEnd <= PlanStart with no duration) would produce
    // division-by-zero / negative proportions — skip the run instead.
    if (!(prodDuration > 0)) {
      return;
    }

    // Walk work-day windows from the one containing the start until we pass
    // the run's end.
    let prodDayStart = getProdDayStart(prodStart);
    while (prodDayStart < prodEnd) {
      // next window begins one day later at 07:00
      const nextProdDayStart = new Date(prodDayStart);
      nextProdDayStart.setDate(nextProdDayStart.getDate() + 1);
      // segment ends at the next window start or the actual run end,
      // whichever comes first
      const segEnd = new Date(
        Math.min(nextProdDayStart.getTime(), prodEnd.getTime()),
      );
      // segment starts at the window start or the actual run start,
      // whichever comes later
      const segStart = new Date(
        Math.max(prodDayStart.getTime(), prodStart.getTime()),
      );
      if (segEnd > segStart) {
        const segMs = segEnd.getTime() - segStart.getTime();
        const proportion = segMs / prodDuration;
        const qty = PlanQuantityPieces * proportion;
        const pal = run.PlanQuantityLoadingUnit * proportion;
        results.push({
          Article: ArticleHumanReadableId,
          Description: run.ArticleAlias,
          MachineId: run.MachineLocation,
          MachineName: run.MachineDescription,
          LotNumber: run.lot,
          ProductionDay: format(prodDayStart, "M/d/yyyy"),
          TotalPlanned: run.PlanQuantityPieces,
          // PlanEnd,
          // TheoreticEnd,
          QTYPerDay: Number.parseInt(qty.toFixed(0), 10),
          PalDay: Number.parseFloat(pal.toFixed(2)),
          finished: run.ProductionLotState === 3 ? 1 : 0,
          cavities: run.Cavities,
          //prodDuration,
        });
      }
      // move to the next production-day window
      prodDayStart = nextProdDayStart;
    }
  });
  return results;
};

View File

@@ -145,7 +145,7 @@ app.openapi(
return c.json({ return c.json({
success: true, success: true,
message: "All Current Active Querys.", message: "All Current Active Querys.",
sheetVersion: 2.8, sheetVersion: 2.8, // TODO: when this gets switched change this
data: current, data: current,
}); });
}, },

View File

@@ -63,10 +63,10 @@ setTimeout(async () => {
// the time we want to run the hostircal data should be the same time the historical data run on the server // the time we want to run the hostircal data should be the same time the historical data run on the server
// getting this from the shift time // getting this from the shift time
if (process.env.NODE_ENV?.trim() !== "production") { //if (process.env.NODE_ENV?.trim() !== "production") {
setTimeout(() => { setTimeout(() => {
historicalInvIMmport(); historicalInvIMmport();
}, 15 * 1000); }, 15 * 1000);
} //}
export default app; export default app;

View File

@@ -0,0 +1,155 @@
import axios from "axios";
import net from "net";
import { db } from "../../../../../database/dbclient.js";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { scanner } from "../../../../globalUtils/scannerConnect.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js";
// Request payload for bookOutPallet.
type Data = {
  runningNr: number; // pallet running number to book out
  reason: string; // free-text reason for the audit log; must be >= 4 chars
  user: string; // requesting user; destructured but not used yet
};
/**
 * Books a pallet out of stock by its running number.
 *
 * Flow: validate the reason, look the label up via the configurable
 * "inventoryInfo" query, require the pallet to be in PPOO, allow only
 * COA-blocked (or unblocked) pallets, unblock via the scanner if needed,
 * POST the SSCC to the TEC BookOut endpoint, then audit-log the command.
 *
 * @param data - running number, reason (>= 4 chars) and requesting user
 * @returns result envelope { success, status, message, data? }
 */
export const bookOutPallet = async (data: Data) => {
  const { runningNr, reason, user } = data;
  // a meaningful reason is required for the audit trail
  if (!reason || reason.length < 4) {
    return {
      success: false,
      status: 400,
      message: "The reason provided is to short",
      data: [],
    };
  }
  // resolve the file-based inventory lookup query; bail if it is missing
  const queryCheck = sqlQuerySelector("inventoryInfo.query");
  if (!queryCheck.success) {
    return {
      success: false,
      status: 400,
      message: queryCheck.message,
      data: data,
    };
  }
  // fetch the label record for this running number
  const { data: label, error: labelError } = (await tryCatch(
    query(
      queryCheck.query!.replace("[runningNr]", `${runningNr}`),
      "labelQuery",
    ),
  )) as any;
  if (labelError) {
    return {
      success: false,
      status: 400,
      message: labelError.message,
      data: labelError,
    };
  }
  // check if we are in ppoo
  if (label.data.length <= 0) {
    return {
      success: false,
      status: 400,
      message: `${runningNr} is not currently in ppoo, please move to ppoo before trying to book-out`,
      data: [],
    };
  }
  // check if the label is blocked for coa.
  // Only an unblocked pallet or one blocked specifically for COA may be
  // booked out; any other blocking reason must be released by quality first.
  if (
    label.data[0].blockingReason &&
    !label.data[0].blockingReason?.includes("COA")
  ) {
    return {
      success: false,
      status: 400,
      message: `${runningNr} is not currently blocked for coa, to get this pallet booked out please take the label to quality to be released then you can book-out.`,
      data: [],
    };
  }
  // COA-blocked: drive the scanner command to unblock before booking out
  if (label.data[0].blockingReason) {
    await scanner.scan("AlplaPRODcmd89");
    await scanner.scan(`${label.data[0].barcode}`);
  }
  // create the url to post
  const url = await prodEndpointCreation(
    "/public/v1.1/Manufacturing/ProductionControlling/BookOut",
  );
  const SSCC = await createSSCC(runningNr);
  const bookOutData = {
    // SSCC is sent without its 2-char prefix — TODO confirm TEC expects this
    sscc: SSCC.slice(2),
    scannerId: "666",
  };
  try {
    const results = await axios.post(url, bookOutData, {
      headers: {
        "X-API-Key": process.env.TEC_API_KEY || "",
        "Content-Type": "application/json",
      },
    });
    // TEC reports problems in an Errors object even on HTTP 200
    if (results.data.Errors) {
      return {
        success: false,
        status: 400,
        message: results.data.Errors.Error.Description,
      };
    }
    // if (results.data.Result !== 0) {
    //   console.log("stopping here and closing to soon", results);
    //   return {
    //     success: false,
    //     status: 400,
    //     message: results.data.Message,
    //   };
    // }
    // best-effort audit log; failure here does not fail the book-out
    const { data: commandL, error: ce } = await tryCatch(
      db.insert(commandLog).values({
        commandUsed: "book out",
        bodySent: data,
        reasonUsed: reason,
      }),
    );
    return {
      success: true,
      message: `${runningNr} was booked out`,
      status: results.status,
    };
  } catch (error: any) {
    console.log(bookOutData);
    return {
      success: false,
      status: 400,
      message: error.response?.data,
      data: error.response?.data,
    };
  }
  // });
  /**
   * book out the label with
   * url /public/v1.1/Manufacturing/ProductionControlling/BookOut
   * {
   *  "sscc": "string",
   *  "scannerId": "string"
   * }
   */
  //---------------------------------------------------------------------------------------\\
};

View File

@@ -0,0 +1,96 @@
import axios from "axios";
import { db } from "../../../../../database/dbclient.js";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
// Request payload for relatePallet (pallet relocation).
type Data = {
  runningNr: number; // pallet running number to relocate
  laneID: number; // destination warehouse lane id
};
/**
 * Relocates a pallet to a warehouse lane by its running number.
 *
 * Picks the v1.0 or v1.1 TEC relocate endpoint depending on whether the
 * warehousing feature (Feature = 7) is enabled, builds the SSCC and POSTs
 * the relocation, then audit-logs the command.
 *
 * @param data - running number and destination lane id
 * @returns result envelope { success, message, status? }
 */
export const relatePallet = async (data: Data) => {
  const { runningNr, laneID } = data;
  // do we have warehousing turned on? (Feature = 7 gates the v1.1 API)
  const { data: feature, error: featureError } = (await tryCatch(
    query(
      `SELECT [Id]
      ,[Feature]
      ,[Enabled]
      ,[ActivationDate]
  FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
      "feature switch check",
    ),
  )) as any;
  // 1.0 "/public/v1.0/Warehousing/Relocate" is the default and the
  // fallback when the feature lookup fails
  let prodUrl = "/public/v1.0/Warehousing/Relocate";
  if (featureError) {
    prodUrl = "/public/v1.0/Warehousing/Relocate";
  }
  // feature row present -> new warehousing, use the v1.1 endpoint
  if (feature?.data.length > 0) {
    prodUrl = "/public/v1.1/Warehousing/Unit/Relocate";
  }
  const url = await prodEndpointCreation(prodUrl);
  const SSCC = await createSSCC(runningNr);
  const relocateBody = {
    ScannerId: 999,
    laneId: laneID,
    // SSCC is sent without its 2-char prefix — TODO confirm TEC expects this
    sscc: SSCC.slice(2),
  };
  try {
    const results = await axios.post(url, relocateBody, {
      headers: {
        "X-API-Key": process.env.TEC_API_KEY || "",
        "Content-Type": "application/json",
      },
    });
    // TEC reports problems in an Errors object even on HTTP 200
    if (results.data.Errors) {
      return {
        success: false,
        message: results.data.Errors.Error.Description,
      };
    }
    // bug fix: results.data.data could be undefined, which used to throw a
    // TypeError here; a missing/empty data array is treated as failure.
    if (results.data.Result !== 0 || (results.data.data?.length ?? 0) <= 0) {
      return {
        success: false,
        message: results.data.Message,
      };
    }
    // best-effort audit log; failure here does not fail the relocate
    const { data: commandL, error: ce } = await tryCatch(
      db.insert(commandLog).values({
        commandUsed: "relocate",
        bodySent: data,
      }),
    );
    return {
      success: true,
      message: "Pallet Was Relocated",
      status: results.status,
    };
  } catch (error: any) {
    console.log(error);
    return {
      success: false,
      status: 400, // bug fix: error path previously reported status 200
      // bug fix: the old error.response?.data.errors[0].message could itself
      // throw when the shape differed; chain safely and fall back.
      message: error.response?.data?.errors?.[0]?.message ?? error.message,
    };
  }
};

View File

@@ -1,120 +1,50 @@
import axios from "axios";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { lstAuth } from "../../../../index.js";
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { db } from "../../../../../database/dbclient.js"; import { db } from "../../../../../database/dbclient.js";
import net from "net"; import { commandLog } from "../../../../../database/schema/commandLog.js";
import { scanner } from "../../../../globalUtils/scannerConnect.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js"; import { query } from "../../../sqlServer/prodSqlServer.js";
import { labelInfo } from "../../../sqlServer/querys/warehouse/labelInfo.js"; import { labelInfo } from "../../../sqlServer/querys/warehouse/labelInfo.js";
import { settings } from "../../../../../database/schema/settings.js";
import { eq } from "drizzle-orm";
import { serverData } from "../../../../../database/schema/serverData.js";
export const removeAsNonReusable = async (data: any) => { export const removeAsNonReusable = async (data: any) => {
// const removalUrl = await prodEndpointCreation( // get the label info
// "/public/v1.0/Warehousing/RemoveAsNonReusableMaterial" const { data: label, error: labelError } = (await tryCatch(
// ); query(labelInfo.replaceAll("[runningNr]", data.runningNr), "Label Info"),
)) as any;
// const sscc = await createSSCC(data.runningNr); if (label.data[0].stockStatus === "notOnStock") {
return {
success: false,
message: `The label: ${data.runningNr} is not currently in stock`,
data: [],
};
}
// const { data: remove, error } = await tryCatch( if (label.data[0].blockingReason) {
// axios.post( return {
// removalUrl, success: false,
// { scannerId: "500", sscc: sscc.slice(2) }, status: 400,
// { message: `${data.runningNr} is currently blocked, to get this pallet removed please take the label to quality to be released then you can remove.`,
// headers: { Authorization: `Basic ${lstAuth}` }, data: [],
// } };
// ) }
// );
// use a scanner tcp connection to trigger this process await scanner.scan("AlplaPRODcmd23");
const STX = "\x02"; await scanner.scan(`${label.data[0].barcode}`);
const ETX = "\x03";
const scanner = new net.Socket();
let stage = 0;
// get the label info
const { data: label, error: labelError } = (await tryCatch(
query(labelInfo.replaceAll("[runningNr]", data.runningNr), "Label Info")
)) as any;
if (label.data[0].stockStatus === "notOnStock") { let reason = data.reason || "";
return { delete data.reason;
success: false,
message: `The label: ${data.runningNr} is not currently in stock`,
data: [],
};
}
// get the server ip based on the token. const { data: commandL, error: ce } = await tryCatch(
const setting = await db.select().from(settings); db.insert(commandLog).values({
commandUsed: "removeAsNonReusable",
bodySent: data,
reasonUsed: reason,
}),
);
const plantInfo = await db.select().from(serverData); return {
const plantToken = setting.filter((n: any) => n.name === "plantToken"); success: true,
const scannerID = setting.filter((n: any) => n.name === "scannerID"); message: `The label: ${data.runningNr}, was removed`,
const scannerPort = setting.filter((n: any) => n.name === "scannerPort"); data: [],
const plantData = plantInfo.filter( };
(p: any) => p.plantToken === plantToken[0].value
);
scanner.connect(
parseInt(scannerPort[0].value),
plantData[0].idAddress!,
async () => {
// need to get the ip from the server data and scanner port
//console.log(`connected to scanner`);
scanner.write(`${STX}${scannerID[0].value}@AlplaPRODcmd23${ETX}`);
}
);
scanner.on("data", (data) => {
const response = data.toString();
//console.log("Received:", response.trimStart());
if (stage === 0) {
stage = 1;
scanner.write(
`${STX}${scannerID[0].value}@${label.data[0].Barcode}${ETX}`
);
} else if (stage === 1) {
scanner.end();
}
});
scanner.on("close", () => {
//console.log("Connection closed");
scanner.destroy();
});
scanner.on("error", (err) => {
//console.error("Scanner error:", err);
scanner.destroy();
return {
success: false,
message: `The label: ${data.runningNr} encountering an error while being removed, please try again`,
data: [],
};
});
// if (error) {
// //console.log(error);
// return {
// success: false,
// message: `There was an error removing ${data.runningNr}`,
// data: [],
// };
// }
let reason = data.reason || "";
delete data.reason;
const { data: commandL, error: ce } = await tryCatch(
db.insert(commandLog).values({
commandUsed: "removeAsNonReusable",
bodySent: data,
reasonUsed: reason,
})
);
return {
success: true,
message: `The label: ${data.runningNr}, was removed`,
data: [],
};
}; };

View File

@@ -1,4 +1,4 @@
import { addDays, addHours, isAfter, parse } from "date-fns"; import { addDays, addHours, isAfter, parse, subDays } from "date-fns";
import { format } from "date-fns-tz"; import { format } from "date-fns-tz";
import XLSX from "xlsx"; import XLSX from "xlsx";
import { db } from "../../../../../../../database/dbclient.js"; import { db } from "../../../../../../../database/dbclient.js";
@@ -94,6 +94,10 @@ export const abbottOrders = async (data: any, user: any) => {
}; };
const oOrders: any = openOrders; const oOrders: any = openOrders;
//console.log(orderData); //console.log(orderData);
function trimAll(str: string) {
return str.replace(/\s+/g, "");
}
let correctedOrders: any = orderData let correctedOrders: any = orderData
.filter( .filter(
(o: any) => (o: any) =>
@@ -103,9 +107,9 @@ export const abbottOrders = async (data: any, user: any) => {
.map((o: any) => ({ .map((o: any) => ({
date: excelDateStuff(o.date, o.time), date: excelDateStuff(o.date, o.time),
po: po:
o.newton8oz.replace(/\s+/g, "") !== "" trimAll(o.newton8oz) !== ""
? o.newton8oz.replace(/\s+/g, "") ? trimAll(o.newton8oz)
: o.newton10oz.replace(/\s+/g, ""), : o.newton10oz.replace(/[\s\u00A0]+/g, ""),
customerArticlenumber: customerArticlenumber:
o.newton8oz != "" o.newton8oz != ""
? a.filter((a: any) => a.av === 118)[0].CustomerArticleNumber ? a.filter((a: any) => a.av === 118)[0].CustomerArticleNumber
@@ -116,24 +120,29 @@ export const abbottOrders = async (data: any, user: any) => {
: a.filter((a: any) => a.av === 120)[0].totalTruckLoad, : a.filter((a: any) => a.av === 120)[0].totalTruckLoad,
})); }));
//console.log(correctedOrders);
// now we want to make sure we only correct orders that or after now // now we want to make sure we only correct orders that or after now
correctedOrders = correctedOrders.filter((o: any) => { correctedOrders = correctedOrders.filter((o: any) => {
const parsedDate = parse(o.date, "M/d/yyyy, h:mm:ss a", new Date()); const parsedDate = parse(o.date, "M/d/yyyy, h:mm:ss a", new Date());
return isAfter(o.date, new Date().toISOString()); return isAfter(new Date(o.date), new Date().toISOString());
}); });
//console.log(correctedOrders);
// last map to remove orders that have already been started // last map to remove orders that have already been started
// correctedOrders = correctedOrders.filter((oo: any) => // correctedOrders = correctedOrders.filter((oo: any) =>
// oOrders.some((o: any) => o.CustomerOrderNumber === oo.po) // oOrders.some((o: any) => o.CustomerOrderNumber === oo.po)
// ); // );
let postedOrders: any = []; let postedOrders: any = [];
const filterOrders: any = correctedOrders; const filterOrders: any = correctedOrders;
//console.log(filterOrders);
filterOrders.forEach((oo: any) => { filterOrders.forEach((oo: any) => {
const isMatch = openOrders.some( const isMatch = openOrders.some(
(o: any) => String(o.po).trim() === String(oo.po).trim(), (o: any) => String(o.po).trim() === String(oo.po).trim(),
); );
//console.log(isMatch, oo.po);
if (!isMatch) { if (!isMatch) {
//console.log(`ok to update: ${oo.po}`); console.log(`ok to update: ${oo.po}`);
// oo = { // oo = {
// ...oo, // ...oo,
@@ -141,7 +150,7 @@ export const abbottOrders = async (data: any, user: any) => {
// }; // };
postedOrders.push(oo); postedOrders.push(oo);
} else { } else {
// console.log(`Not valid order to update: ${oo.po}`); //console.log(`Not valid order to update: ${oo.po}`);
//console.log(oo) //console.log(oo)
} }
}); });
@@ -159,7 +168,7 @@ export const abbottOrders = async (data: any, user: any) => {
deliveryAddressId: 8, deliveryAddressId: 8,
customerArticleNo: o.customerArticlenumber, customerArticleNo: o.customerArticlenumber,
quantity: o.qty, quantity: o.qty,
deliveryDate: addHours(format(o.date, "M/d/yyyy HH:mm"), 1), //addHours(addDays(o.date, 1), 1), // adding this in so we can over come the constant 1 day behind thing as a work around deliveryDate: format(o.date, "M/d/yyyy HH:mm"), // addHours(format(o.date, "M/d/yyyy HH:mm"), 1), //addHours(addDays(o.date, 1), 1), // adding this in so we can over come the constant 1 day behind thing as a work around
customerLineItemNo: 1, // this is how it is currently sent over from abbott customerLineItemNo: 1, // this is how it is currently sent over from abbott
customerReleaseNo: 1, // same as above customerReleaseNo: 1, // same as above
}, },

View File

@@ -1,6 +1,7 @@
import axios from "axios"; import axios from "axios";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js"; import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
export const postAdjustment = async (data: any) => { export const postAdjustment = async (data: any) => {
if (data.warehouseId === undefined) { if (data.warehouseId === undefined) {
@@ -35,9 +36,30 @@ export const postAdjustment = async (data: any) => {
quantity: data.quantity, quantity: data.quantity,
}; };
let url = await prodEndpointCreation( // do we have warehousing turned on?
"/public/v1.0/Warehousing/AdjustSiloStockLevel", const { data: feature, error: featureError } = (await tryCatch(
); query(
`SELECT [Id]
,[Feature]
,[Enabled]
,[ActivationDate]
FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
"feature switch check",
),
)) as any;
let prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
if (featureError) {
prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
}
if (feature?.data.length > 0) {
prodUrl = "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel";
}
// 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel","
// 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"
let url = await prodEndpointCreation(prodUrl);
const { data: silo, error } = await tryCatch( const { data: silo, error } = await tryCatch(
axios.post(url, siloAdjustment, { axios.post(url, siloAdjustment, {

View File

@@ -1,7 +1,9 @@
import { OpenAPIHono } from "@hono/zod-openapi"; import { OpenAPIHono } from "@hono/zod-openapi";
import { migrateAdjustments } from "./controller/siloAdjustments/migrateAdjustments.js"; import { migrateAdjustments } from "./controller/siloAdjustments/migrateAdjustments.js";
import { getLanesToCycleCount } from "./controller/warehouse/cycleCountChecks/cyclecountCheck.js"; import { getLanesToCycleCount } from "./controller/warehouse/cycleCountChecks/cyclecountCheck.js";
import attachSilo from "./route/attachSilo.js"; import attachSilo from "./route/attachSilo.js";
import bookOutPallet from "./route/bookout.js";
import comsumeMaterial from "./route/consumeMaterial.js"; import comsumeMaterial from "./route/consumeMaterial.js";
import detachSilo from "./route/detachSilo.js"; import detachSilo from "./route/detachSilo.js";
import postBulkOrders from "./route/dm/bulkOrdersIn.js"; import postBulkOrders from "./route/dm/bulkOrdersIn.js";
@@ -16,6 +18,7 @@ import outbound from "./route/getOutbound.js";
import getPPOO from "./route/getPPOO.js"; import getPPOO from "./route/getPPOO.js";
import getConnectionType from "./route/getSiloConnectionData.js"; import getConnectionType from "./route/getSiloConnectionData.js";
import getSSCC from "./route/getSSCCNumber.js"; import getSSCC from "./route/getSSCCNumber.js";
import relocate from "./route/relocate.js";
import removeAsNonReable from "./route/removeAsNonReusable.js"; import removeAsNonReable from "./route/removeAsNonReusable.js";
import returnMat from "./route/returnMaterial.js"; import returnMat from "./route/returnMaterial.js";
import createSiloAdjustment from "./route/siloAdjustments/createSiloAdjustment.js"; import createSiloAdjustment from "./route/siloAdjustments/createSiloAdjustment.js";
@@ -28,7 +31,7 @@ const app = new OpenAPIHono();
const routes = [ const routes = [
comsumeMaterial, comsumeMaterial,
returnMat, returnMat,
relocate,
// silo // silo
createSiloAdjustment, createSiloAdjustment,
postComment, postComment,
@@ -55,6 +58,7 @@ const routes = [
// logisitcs // logisitcs
removeAsNonReable, removeAsNonReable,
getSSCC, getSSCC,
bookOutPallet,
] as const; ] as const;
// app.route("/server", modules); // app.route("/server", modules);

View File

@@ -0,0 +1,87 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { verify } from "hono/jwt";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
//import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { bookOutPallet } from "../controller/commands/bookout.js";
const app = new OpenAPIHono();

// Minimal success/error envelope returned by every /bookout response.
const responseSchema = z.object({
  success: z.boolean().optional().openapi({ example: true }),
  message: z.string().optional().openapi({ example: "user access" }),
});

app.openapi(
  createRoute({
    tags: ["logistics"],
    // NOTE(review): summary/description/response descriptions were copy-pasted
    // from the consume-material route; corrected to describe this endpoint.
    summary: "Books out a pallet",
    method: "post",
    path: "/bookout",
    //middleware: authMiddleware,
    description:
      "Provided the pallet data in the request body, books the pallet out of the system.",
    responses: {
      200: {
        content: { "application/json": { schema: responseSchema } },
        description: "Pallet booked out",
      },
      400: {
        content: { "application/json": { schema: responseSchema } },
        description: "Failed to book out the pallet",
      },
      401: {
        content: { "application/json": { schema: responseSchema } },
        description: "Unauthorized",
      },
    },
  }),
  async (c) => {
    // A malformed or missing JSON body is rejected up front with a 400.
    const { data, error } = await tryCatch(c.req.json());
    if (error) {
      return c.json(
        {
          success: false,
          message: "Missing data please try again",
          error,
        },
        400,
      );
    }
    // Record the API hit (audit/analytics side effect).
    apiHit(c, { endpoint: "/bookout", lastBody: data });
    //const authHeader = c.req.header("Authorization");
    //const token = authHeader?.split("Bearer ")[1] || "";
    //const payload = await verify(token, process.env.JWT_SECRET!);
    try {
      const bookout = await bookOutPallet(data);
      return c.json(
        {
          success: bookout?.success,
          message: bookout?.message,
          // optional chaining kept consistent with success/message above
          data: bookout?.data,
        },
        200,
      );
    } catch (error) {
      // Keep the failure visible in stdout as well as in the response.
      console.log("bookout error:", error);
      return c.json(
        {
          success: false,
          message: "Missing data please try again",
          error,
        },
        400,
      );
    }
  },
);
export default app;

View File

@@ -0,0 +1,80 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { relatePallet } from "../controller/commands/relocated.js";
const app = new OpenAPIHono();

// Minimal success/error envelope returned by every /relocate response.
const responseSchema = z.object({
  success: z.boolean().optional().openapi({ example: true }),
  message: z.string().optional().openapi({ example: "user access" }),
});

app.openapi(
  createRoute({
    tags: ["logistics"],
    // NOTE(review): summary/description/response descriptions were copy-pasted
    // from the consume-material route; corrected to describe this endpoint.
    summary: "Relocates a pallet",
    method: "post",
    path: "/relocate",
    //middleware: authMiddleware,
    description:
      "Provided the pallet data in the request body, relocates the pallet.",
    responses: {
      200: {
        content: { "application/json": { schema: responseSchema } },
        description: "Pallet relocated",
      },
      400: {
        content: { "application/json": { schema: responseSchema } },
        description: "Failed to relocate",
      },
      401: {
        content: { "application/json": { schema: responseSchema } },
        description: "Unauthorized",
      },
    },
  }),
  async (c) => {
    // A malformed or missing JSON body is rejected up front with a 400.
    const { data, error } = await tryCatch(c.req.json());
    if (error) {
      return c.json(
        {
          success: false,
          message: "Missing data please try again",
          error,
        },
        400,
      );
    }
    // Record the API hit (audit/analytics side effect).
    apiHit(c, { endpoint: "/relocate", lastBody: data });
    //const authHeader = c.req.header("Authorization");
    //const token = authHeader?.split("Bearer ")[1] || "";
    //const payload = await verify(token, process.env.JWT_SECRET!);
    try {
      // NOTE(review): the controller export is named `relatePallet` —
      // presumably a typo for relocatePallet; confirm before renaming there.
      const relocated = await relatePallet(data);
      return c.json(
        { success: relocated?.success, message: relocated?.message },
        200,
      );
    } catch (error) {
      return c.json(
        {
          success: false,
          message: "Missing data please try again",
          error,
        },
        400,
      );
    }
  },
);
export default app;

View File

@@ -4,95 +4,92 @@ import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js"; import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js"; import { query } from "../../../sqlServer/prodSqlServer.js";
import { sendEmail } from "../sendMail.js";
import { bow2incoming } from "../../../sqlServer/querys/notifications/bow2henkel.js"; import { bow2incoming } from "../../../sqlServer/querys/notifications/bow2henkel.js";
import { sendEmail } from "../sendMail.js";
const notification = async (notifyData: any) => { const notification = async (notifyData: any) => {
/** /**
* Pass the entire notification over * Pass the entire notification over
*/ */
createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`); createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`);
// validate if there are any emails. // validate if there are any emails.
if (notifyData.emails === "") { if (notifyData.emails === "") {
createLog( createLog(
"error", "error",
"reprinting", "reprinting",
"notify", "notify",
`There are no emails set for ${notifyData.name}` `There are no emails set for ${notifyData.name}`,
); );
return; return;
} }
//let labels: Labels[]; //let labels: Labels[];
const { data: l, error: labelError } = await tryCatch( const { data: l, error: labelError } = await tryCatch(
query( query(
bow2incoming.replace( bow2incoming.replace("[time]", notifyData.notifiySettings.processTime),
"[time]", "Label Reprints",
notifyData.notifiySettings.processTime ),
), );
"Label Reprints" const labels: any = l?.data as any;
) if (labelError) {
); createLog(
const labels: any = l?.data as any; "error",
if (labelError) { "reprinting",
createLog( "notify",
"error", `Failed to get the labels: ${labelError}`,
"reprinting", );
"notify", return;
`Failed to get the labels: ${labelError}` }
);
return;
}
if (labels.length > 0) { if (labels.length > 0) {
//send the email :D //send the email :D
const emailSetup = { const emailSetup = {
email: notifyData.emails, email: notifyData.emails,
subject: "Alert! New incoming goods has been received", subject: "Alert! New incoming goods has been received",
template: "bow2IncomingGoods", template: "bow2IncomingGoods",
context: { context: {
items: labels, items: labels,
time: notifyData.notifiySettings.processTime, time: notifyData.notifiySettings.processTime,
}, },
}; };
const sentEmail = await sendEmail(emailSetup); const sentEmail = await sendEmail(emailSetup);
if (!sentEmail.success) { if (!sentEmail.success) {
createLog( createLog(
"error", "error",
"reprinting", "reprinting",
"notify", "notify",
"Failed to send email, will try again on next interval" "Failed to send email, will try again on next interval",
); );
return; return;
} }
// // update the last time we ran and the prod id // // update the last time we ran and the prod id
// const notifUpdate = { // const notifUpdate = {
// prodID: labels[0].IdEtikettenHistorie, // prodID: labels[0].IdEtikettenHistorie,
// lastRan: nowDate(), // lastRan: nowDate(),
// }; // };
// update the last time ran // update the last time ran
const { data, error } = await tryCatch( const { data, error } = await tryCatch(
db db
.update(notifications) .update(notifications)
.set({ .set({
lastRan: sql`NOW()`, lastRan: sql`NOW()`,
notifiySettings: { notifiySettings: {
...notifyData.notifiySettings, ...notifyData.notifiySettings,
prodID: labels[0].IdEtikettenHistorie, prodID: labels[0].IdEtikettenHistorie,
}, },
}) })
.where(eq(notifications.name, notifyData.name)) .where(eq(notifications.name, notifyData.name)),
); );
} else { } else {
return; return;
} }
}; };
export default notification; export default notification;

View File

@@ -0,0 +1,108 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import {
type SqlQuery,
sqlQuerySelector,
} from "../../../sqlServer/utils/querySelector.utils.js";
import { sendEmail } from "../sendMail.js";
export interface Labels {
IdEtikettenHistorie?: number;
}
/**
 * Cycle-count notification.
 *
 * Runs the `cycleCountCheck` SQL template (with `[timeTest]` replaced by the
 * notification's check interval) against the production SQL Server; when rows
 * come back, emails the configured recipients and stamps `lastRan` on the
 * notification row so the scheduler spaces out the next run.
 *
 * @param notifyData - the notification record (name, emails, checkInterval,
 *   notifiySettings) — shape assumed from usage, TODO confirm.
 */
const notification = async (notifyData: any) => {
  // NOTE(review): log source was "reprinting", copy-pasted from the reprint
  // notification; corrected to identify this check in the logs.
  createLog("debug", "cycleCount", "notify", `monitoring ${notifyData.name}`);
  // Bail out early when no recipients are configured.
  if (notifyData.emails === "") {
    createLog(
      "error",
      "cycleCount",
      "notify",
      `There are no emails set for ${notifyData.name}`,
    );
    return;
  }
  // Load the SQL template from file.
  const cycleCountCheck = sqlQuerySelector("cycleCountCheck.query") as SqlQuery;
  if (!cycleCountCheck.success) {
    console.log("Failed to load the query: ", cycleCountCheck.message);
    return;
  }
  const { data: c, error: cError } = await tryCatch(
    query(
      cycleCountCheck.query.replace("[timeTest]", notifyData.checkInterval),
      "Cycle count check",
    ),
  );
  const cycle: any = c?.data ?? ([] as any);
  if (cError) {
    createLog(
      "error",
      "cycleCount",
      "notify",
      `Failed to get the cycle count data: ${cError}`,
    );
    return;
  }
  // Nothing blocked past the threshold — nothing to report this cycle.
  if (cycle.length === 0) {
    return;
  }
  // Rows found: send the alert email.
  const emailSetup = {
    email: notifyData.emails,
    subject: `Alert! RowBlocked for more than ${notifyData.checkInterval} min(s)`,
    template: "cycleCountCheck",
    context: {
      checkTime: notifyData.checkInterval,
      items: cycle,
    },
  };
  const sentEmail = await sendEmail(emailSetup);
  if (!sentEmail.success) {
    createLog(
      "error",
      "cycleCount",
      "notify",
      "Failed to send email, will try again on next interval",
    );
    return;
  }
  // Record that this run completed.
  const { error: updateError } = await tryCatch(
    db
      .update(notifications)
      .set({
        lastRan: sql`NOW()`,
      })
      .where(eq(notifications.name, notifyData.name)),
  );
  if (updateError) {
    // Previously this error was silently discarded; surface it so a stuck
    // `lastRan` (and the resulting repeat emails) shows up in the logs.
    createLog(
      "error",
      "cycleCount",
      "notify",
      `Failed to update lastRan for ${notifyData.name}: ${updateError}`,
    );
  }
};
export default notification;

View File

@@ -1,112 +1,112 @@
import { isBefore } from "date-fns"; import { isBefore } from "date-fns";
import { db } from "../../../../../database/dbclient.js";
import { fifoIndex } from "../../../../../database/schema/fifoIndex.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js"; import { query } from "../../../sqlServer/prodSqlServer.js";
import { currentInv } from "../../../sqlServer/querys/notifications/fifoIndex/currentInv.js"; import { currentInv } from "../../../sqlServer/querys/notifications/fifoIndex/currentInv.js";
import { shippedPallets } from "../../../sqlServer/querys/notifications/fifoIndex/shippedPallets.js"; import { shippedPallets } from "../../../sqlServer/querys/notifications/fifoIndex/shippedPallets.js";
import { db } from "../../../../../database/dbclient.js";
import { fifoIndex } from "../../../../../database/schema/fifoIndex.js";
export default async function fifoIndexCheck() { export default async function fifoIndexCheck() {
/** /**
* getting the shipped pallets * getting the shipped pallets
*/ */
const { data: shipped, error: eShipped } = await tryCatch( const { data: shipped, error: eShipped } = await tryCatch(
query(shippedPallets, "notify shipped pallets") query(shippedPallets, "notify shipped pallets"),
); );
const { data: currentStuff, error: eCurrentInv } = await tryCatch( const { data: currentStuff, error: eCurrentInv } = await tryCatch(
query(currentInv, "notify shipped pallets") query(currentInv, "notify shipped pallets"),
); );
// console.log(shipped?.data[2]); // console.log(shipped?.data[2]);
// console.log(currentStuff?.data[2]); // console.log(currentStuff?.data[2]);
/** /**
* We want to check if the each shippened pallet is out of fifo * We want to check if the each shippened pallet is out of fifo
*/ */
const check = shipped?.data.map((n: any) => { const check: any = shipped?.data.map((n: any) => {
/** /**
* Returns all data so we know if we are in or out. * Returns all data so we know if we are in or out.
*/ */
//check if there are pallets older than the current one we are mapped on. //check if there are pallets older than the current one we are mapped on.
const fifoCheck = currentStuff?.data.filter( const fifoCheck = currentStuff?.data.filter(
(i: any) => isBefore(i.prodDate, n.prodDate) && i.av === n.av (i: any) => isBefore(i.prodDate, n.prodDate) && i.av === n.av,
); ) as any;
//console.log(fifoCheck.length); //console.log(fifoCheck.length);
if (fifoCheck.length > 0) { if (fifoCheck.length > 0) {
// console.log("Out of fifo", { // console.log("Out of fifo", {
// av: n.av, // av: n.av,
// rn: n.runningNr, // rn: n.runningNr,
// fRn: fifoCheck[0].runningNr, // fRn: fifoCheck[0].runningNr,
// dates: [fifoCheck[0].prodDate, n.prodDate], // dates: [fifoCheck[0].prodDate, n.prodDate],
// }); // });
} }
return { return {
...n, ...n,
// currentInv: fifoCheck[0], // currentInv: fifoCheck[0],
fifoFollowed: fifoCheck.length === 0 ? true : false, fifoFollowed: fifoCheck.length === 0 ? true : false,
}; };
}); });
/** /**
* lets see just the av that is our or in * lets see just the av that is our or in
*/ */
const avCheck = (check: any) => { const avCheck = (check: any) => {
/** /**
* This will only return the data based on out of fifo. * This will only return the data based on out of fifo.
*/ */
// check how many times each av showed up // check how many times each av showed up
const avCounts = check.reduce((a: any, c: any) => { const avCounts = check.reduce((a: any, c: any) => {
if (c.fifoFollowed === false) { if (c.fifoFollowed === false) {
const avValue = c.av; const avValue = c.av;
a[avValue] = (a[avValue] || 0) + 1; a[avValue] = (a[avValue] || 0) + 1;
} }
return a; return a;
}, {}); }, {});
// transform them back to an avCount Object // transform them back to an avCount Object
const result = Object.keys(avCounts).map((av) => ({ const result = Object.keys(avCounts).map((av) => ({
av: parseInt(av, 10), av: parseInt(av, 10),
count: avCounts[av], count: avCounts[av],
})); }));
return result; return result;
}; };
const outOfFifo: any = avCheck(check); const outOfFifo: any = avCheck(check);
const totalOut = outOfFifo.reduce((sum: any, c: any) => { const totalOut = outOfFifo.reduce((sum: any, c: any) => {
return sum + c.count; return sum + c.count;
}, 0); }, 0);
/** /**
* add the data to the db * add the data to the db
*/ */
for (let i = 0; i < check.length; i++) { for (let i = 0; i < check!.length; i++) {
const { data: dbInsert, error: dbE } = await tryCatch( const { data: dbInsert, error: dbE } = await tryCatch(
db db
.insert(fifoIndex) .insert(fifoIndex)
.values({ .values({
lot: check[i].lot, lot: check[i].lot,
av: check[i].av, av: check[i].av,
runningNr: check[i].runningNr, runningNr: check[i].runningNr,
prodDate: check[i].prodDate, prodDate: check[i].prodDate,
fifoFollowed: check[i].fifoFollowed, fifoFollowed: check[i].fifoFollowed,
add_Date: check[i].add_Date, add_Date: check[i].add_Date,
}) })
.onConflictDoNothing() .onConflictDoNothing(),
); );
} }
return { return {
success: true, success: true,
message: "Fifo index data", message: "Fifo index data",
data: { data: {
palletsOut: check, palletsOut: check,
totalShipped: shipped?.data.length, totalShipped: shipped?.data.length,
inFifo: shipped?.data.length - totalOut, inFifo: shipped!.data.length - totalOut,
outOfFifoData: outOfFifo, outOfFifoData: outOfFifo,
}, },
}; };
} }

View File

@@ -0,0 +1,183 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js";
import { sendEmail } from "../sendMail.js";
// Module-level re-entrancy guard: prevents overlapping runs of this
// notification (overlaps previously caused duplicate emails).
let running = false;

/**
 * Plant-to-plant EDI notification.
 *
 * Looks for newly created BOLs since the last run, groups the planned loads
 * by BOL number, emails the configured recipients plus the customer address
 * on the BOL, and records the processed BOL numbers (most recent 15) in the
 * notification's settings so they are not re-sent.
 *
 * @param notifyData - the notification record; only `name` is read directly,
 *   the remaining settings are re-fetched from the DB. TODO confirm shape.
 */
export default async function platToPlantEdi(notifyData: any) {
  createLog("info", "plantToPlant", "notify", `monitoring ${notifyData.name}`);
  if (running) {
    createLog(
      "info",
      "plantToPlant",
      "notify",
      `Notification ${notifyData.name} is already running skipping`,
    );
    return;
  }
  running = true;
  // Everything below runs inside try/finally so the `running` flag is always
  // released — an early return (e.g. blank addressId / no emails) or a throw
  // previously left it stuck at `true`, permanently disabling this check.
  try {
    const { data: noti, error: notiError } = (await tryCatch(
      db
        .select()
        .from(notifications)
        .where(eq(notifications.name, notifyData.name)),
    )) as any;
    if (notiError) {
      createLog(
        "error",
        "edi",
        "notify",
        "Error in getting the notification data",
      );
    }
    // Load the SQL templates from file; log (but do not abort) on failure —
    // downstream optional chaining falls back to an empty query string.
    const checkBol = sqlQuerySelector("checkBol.query");
    if (!checkBol.success) {
      createLog("error", "edi", "notify", "Error in getting the bol query data");
    }
    const pLinkedB = sqlQuerySelector("palletsLinkedToBol.query");
    if (!pLinkedB.success) {
      createLog("error", "edi", "notify", "Error in getting the bol query data");
    }
    // BOL numbers already emailed — excluded from this run's query.
    const ignoreBols: string[] = noti[0]?.notifiySettings?.processedBol ?? [];
    const joinBols = ignoreBols.join(",");
    // When `includeAll` is set, comment out the Alpla-only filter in the SQL.
    const updateQuery = noti[0]?.notifiySettings?.includeAll
      ? checkBol?.query?.replace(
          "and a.bezeichnung like '%Alpla%'",
          "--and a.bezeichnung like '%Alpla%'",
        )
      : checkBol?.query;
    const { data: b, error: bError } = (await tryCatch(
      query(
        updateQuery
          ?.replace("[timeCheck]", noti[0]?.checkInterval ?? "30")
          .replace("[ignoreBols]", joinBols) ?? "",
        "Check bol",
      ),
    )) as any;
    if (bError) {
      return {
        success: false,
        message: "Error getting newly created bols",
        data: bError,
      };
    }
    // Group the planned-load ids by BOL number.
    const planedByBol = new Map<string, string[]>();
    for (const row of b.data) {
      if (!planedByBol.has(row.bol)) {
        planedByBol.set(row.bol, []);
      }
      planedByBol.get(row.bol)!.push(String(row.idladeplanung));
    }
    if (b.data.length > 0) {
      for (const [bolNumber, idList] of planedByBol.entries()) {
        const joinedIdLadeplanung = idList.join(",");
        // Pallets attached to this BOL's load plans.
        const { data: pallets, error: pError } = await tryCatch(
          query(
            pLinkedB?.query?.replace(
              "[palLinkedToBol]",
              joinedIdLadeplanung ?? "0",
            ) ?? "",
            "Get Pallets linked in the bol",
          ),
        );
        // No customer address on the BOL — nothing to send.
        if (b.data[0].addressId === "") return;
        // Remember this BOL, keeping only the most recent 15 entries.
        ignoreBols.push(bolNumber);
        if (ignoreBols.length > 15) {
          ignoreBols.splice(0, ignoreBols.length - 15);
        }
        // Look up the customer email for the BOL's address id.
        // (renamed from `checkBol`, which shadowed the outer query template)
        const addressInfo = sqlQuerySelector("addressInfo.query");
        const { data: address, error: aError } = (await tryCatch(
          query(
            addressInfo?.query?.replace(
              "[customerAddress]",
              b.data[0].addressId ?? "0",
            ) ?? "",
            "Get Pallets linked in the bol",
          ),
        )) as any;
        if (noti[0]?.emails === "") return; // no default emails configured
        const emailSetup = {
          email: `${noti[0]?.emails};${address.data[0].email ?? ""}`,
          subject: `New EDI transfer Created for BOL: ${bolNumber}`,
          template: "plantToPlantEdi",
          context: {
            items: pallets?.data ?? [],
            bol: bolNumber,
          },
        };
        await sendEmail(emailSetup);
        // Persist last-run time and the processed-BOL list.
        await db
          .update(notifications)
          .set({
            lastRan: sql`NOW()`,
            notifiySettings: {
              ...noti[0]?.notifiySettings,
              processedBol: ignoreBols,
            },
          })
          .where(eq(notifications.name, notifyData.name));
      }
      return {
        success: true,
        message: "All bols have been processed",
        data: [ignoreBols],
      };
    }
    return {
      success: true,
      message: "No new bols have been created",
      data: [],
    };
  } finally {
    running = false;
  }
}

View File

@@ -0,0 +1,118 @@
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
import { errorMonitor } from "node:events";
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { settings } from "../../../../../database/schema/settings.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { sendEmail } from "../sendMail.js";
export interface DownTime {
downTimeId?: number;
machineAlias?: string;
}
/**
 * "Too many errors" notification.
 *
 * Counts rows in public.logs at level 'error' within the last
 * `notifyData.checkInterval` minutes; when the count reaches
 * `notifiySettings.errorCount`, emails the most recent error rows (capped at
 * 100 in the email body) to the configured recipients.
 *
 * @param notifyData - notification record with emails, checkInterval and
 *   notifiySettings.errorCount — shape assumed from usage, TODO confirm.
 * @returns a result object, or undefined when no emails are configured
 *   (matching the original control flow).
 */
export default async function tooManyErrors(notifyData: any) {
  // Future idea (from original notes): a subscription link table
  // (subId/userId/notificationId) could replace the static email list here.
  if (notifyData.emails === "") {
    createLog(
      "error",
      "notify",
      "notify",
      `There are no emails set for ${notifyData.name}`,
    );
    return;
  }
  // Plant token is only used to label the email subject line.
  const plant = await db
    .select()
    .from(settings)
    .where(eq(settings.name, "plantToken"));
  let errorLogData: any = [];
  try {
    // Aggregate count of error-level rows inside the window. The INTERVAL
    // literal is built with sql.raw because it cannot be bound as a parameter.
    const errorData = await db.execute(sql`
  SELECT 'error' AS level, COUNT(*) AS error_count
  FROM public.logs
  WHERE level = 'error'
  AND "add_Date" > now() - INTERVAL ${sql.raw(`'${notifyData.checkInterval} minutes'`)}
  GROUP BY level
  HAVING COUNT(*) >= ${notifyData.notifiySettings.errorCount}
  `);
    // NOTE(review): assumes db.execute returns an array-like result — confirm
    // against the configured drizzle driver.
    if (errorData.length > 0) {
      // Threshold met: fetch the actual rows, newest first.
      const errorLogs = await db.execute(sql`
  select* from public.logs where level = 'error' and "add_Date" > now() - INTERVAL ${sql.raw(`'${notifyData.checkInterval} minutes'`)} order by "add_Date" desc;
  `);
      errorLogData = errorLogs;
      const emailSetup = {
        email: notifyData.emails,
        subject: `Alert! ${plant[0].value} has encountered ${
          errorLogData.length
        } ${errorLogData.length > 1 ? "errors" : "error"} in the last ${notifyData.checkInterval} min`,
        template: "tooManyErrors",
        context: {
          // Cap the email body at the 100 most recent errors.
          data: errorLogData.slice(0, 100),
          count: notifyData.notifiySettings.errorCount,
          time: notifyData.checkInterval,
        },
      };
      const sentEmail = await sendEmail(emailSetup);
      if (!sentEmail.success) {
        createLog(
          "error",
          "notify",
          "notify",
          "Failed to send email, will try again on next interval",
        );
        return {
          success: false,
          message: "Failed to send email, will try again on next interval",
          data: sentEmail,
        };
      }
    }
  } catch (err) {
    createLog(
      "error",
      "notify",
      "notify",
      `Error from running the downtimeCheck query: ${err}`,
    );
    return {
      success: false,
      message: "Error running error data",
      data: err,
    };
  }
  return {
    success: true,
    message: "Error log checking ran",
    data: errorLogData ?? [],
  };
}

View File

@@ -45,57 +45,57 @@ export const sendEmail = async (data: any): Promise<any> => {
}; };
} }
// get the plantToken // get the plantToken
const server = settingData.filter((n) => n.name === "server"); //const server = settingData.filter((n) => n.name === "server");
if ( // if (
server[0].value === "localhostx" && // server[0].value === "localhostx" &&
process.env.EMAIL_USER && // process.env.EMAIL_USER &&
process.env.EMAIL_PASSWORD // process.env.EMAIL_PASSWORD
) { // ) {
transporter = nodemailer.createTransport({ // transporter = nodemailer.createTransport({
service: "gmail", // service: "gmail",
host: "smtp.gmail.com", // host: "smtp.gmail.com",
port: 465, // port: 465,
auth: { // auth: {
user: process.env.EMAIL_USER, // user: process.env.EMAIL_USER,
pass: process.env.EMAIL_PASSWORD, // pass: process.env.EMAIL_PASSWORD,
}, // },
//debug: true, // //debug: true,
}); // });
// update the from email // // update the from email
fromEmail = process.env.EMAIL_USER; // fromEmail = process.env.EMAIL_USER;
} else { //} else {
// convert to the correct plant token. // convert to the correct plant token.
const plantToken = settingData.filter((s) => s.name === "plantToken"); //const plantToken = settingData.filter((s) => s.name === "plantToken");
let host = `${plantToken[0].value}-smtp.alpla.net`; // let host = `${plantToken[0].value}-smtp.alpla.net`;
const testServers = ["test1", "test2", "test3"]; // const testServers = ["test1", "test2", "test3"];
if (testServers.includes(plantToken[0].value)) { // if (testServers.includes(plantToken[0].value)) {
host = "USMCD1-smtp.alpla.net"; // host = "USMCD1-smtp.alpla.net";
} // }
if (plantToken[0].value === "usiow2") { // if (plantToken[0].value === "usiow2") {
host = "USIOW1-smtp.alpla.net"; // host = "USIOW1-smtp.alpla.net";
} // }
transporter = nodemailer.createTransport({ transporter = nodemailer.createTransport({
host: host, host: "smtp.azurecomm.net",
port: 25, port: 587,
rejectUnauthorized: false, //rejectUnauthorized: false,
//secure: false, tls: {
// auth: { minVersion: "TLSv1.2",
// user: "alplaprod", },
// pass: "obelix", auth: {
// }, user: "donotreply@mail.alpla.com",
debug: true, pass: process.env.SMTP_PASSWORD,
} as SMTPTransport.Options); },
debug: true,
// update the from email });
fromEmail = `noreply@alpla.com`; fromEmail = `DoNotReply@mail.alpla.com`;
} //}
// creating the handlbar options // creating the handlbar options
const viewPath = path.resolve( const viewPath = path.resolve(

View File

@@ -10,7 +10,9 @@ import tiTrigger from "./routes/manualTiggerTi.js";
import materialCheck from "./routes/materialPerDay.js"; import materialCheck from "./routes/materialPerDay.js";
import blocking from "./routes/qualityBlocking.js"; import blocking from "./routes/qualityBlocking.js";
import sendemail from "./routes/sendMail.js"; import sendemail from "./routes/sendMail.js";
import errorHandling from "./routes/tooManyErrors.js";
import { note, notificationCreate } from "./utils/masterNotifications.js"; import { note, notificationCreate } from "./utils/masterNotifications.js";
import { sqlJobCleanUp } from "./utils/notificationSqlCleanup.js";
import { startNotificationMonitor } from "./utils/processNotifications.js"; import { startNotificationMonitor } from "./utils/processNotifications.js";
const app = new OpenAPIHono(); const app = new OpenAPIHono();
@@ -23,6 +25,7 @@ const routes = [
notify, notify,
fifoIndex, fifoIndex,
materialCheck, materialCheck,
errorHandling,
] as const; ] as const;
const appRoutes = routes.forEach((route) => { const appRoutes = routes.forEach((route) => {
@@ -55,6 +58,7 @@ if (notesError) {
setTimeout(() => { setTimeout(() => {
notificationCreate(); notificationCreate();
startNotificationMonitor(); startNotificationMonitor();
sqlJobCleanUp();
}, 5 * 1000); }, 5 * 1000);
export default app; export default app;

View File

@@ -0,0 +1,50 @@
// an external way to creating logs
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { eq } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { notifications } from "../../../../database/schema/notifications.js";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import hasCorrectRole from "../../auth/middleware/roleCheck.js";
import tooManyErrors from "../controller/notifications/tooManyErrors.js";
import { getAllJobs } from "../utils/processNotifications.js";
const app = new OpenAPIHono({ strict: false });

app.openapi(
  createRoute({
    tags: ["server"],
    // NOTE(review): summary previously read "Returns current active
    // notifications.", which describes a different route; corrected.
    summary: "Runs the tooManyErrors check and returns its error log data.",
    method: "get",
    path: "/toomanyerrors",
    middleware: [authMiddleware, hasCorrectRole(["systemAdmin"], "admin")],
    responses: responses(),
  }),
  async (c) => {
    // Record the API hit (audit/analytics side effect).
    apiHit(c, { endpoint: "/toomanyerrors" });
    // Load the stored settings for the tooManyErrors notification.
    const { data, error } = await tryCatch(
      db
        .select()
        .from(notifications)
        .where(eq(notifications.name, "tooManyErrors")),
    );
    if (error) {
      // NOTE(review): responds 200 with success:false — confirm whether
      // callers expect a 4xx status here instead.
      return c.json({
        success: false,
        message: "Error Getting Notification Settings.",
        data: error,
      });
    }
    // Run the check immediately with the stored settings.
    const errorData = await tooManyErrors(data[0]);
    return c.json({
      success: true,
      message: "Current Error log data",
      data: errorData?.data,
    });
  },
);
export default app;

View File

@@ -3,175 +3,208 @@ import { notifications } from "../../../../database/schema/notifications.js";
import { createLog } from "../../logger/logger.js"; import { createLog } from "../../logger/logger.js";
export const note: any = [ export const note: any = [
{ {
name: "reprintLabels", name: "reprintLabels",
description: description:
"Monitors the labels that are printed and returns a value if one falls withing the time frame defined below.", "Monitors the labels that are printed and returns a value if one falls withing the time frame defined below.",
checkInterval: 1, checkInterval: 1,
timeType: "min", timeType: "min",
emails: "", emails: "",
active: false, active: false,
notifiySettings: { prodID: 1 }, notifiySettings: { prodID: 1 },
}, },
{ {
name: "downTimeCheck", name: "downTimeCheck",
description: description: "Checks for specific downtimes that are greater than 105 min.",
"Checks for specific downtimes that are greater than 105 min.", checkInterval: 30,
checkInterval: 30, timeType: "min",
timeType: "min", emails: "",
emails: "", active: false,
active: false, notifiySettings: { prodID: 1, daysInPast: 5, duration: 105 },
notifiySettings: { prodID: 1, daysInPast: 5, duration: 105 }, },
}, {
{ name: "qualityBlocking",
name: "qualityBlocking", description:
description: "Checks for new blocking orders that have been entered, recommened to get the most recent order in here before activating.",
"Checks for new blocking orders that have been entered, recommened to get the most recent order in here before activating.", checkInterval: 30,
checkInterval: 30, timeType: "min",
timeType: "min", emails: "",
emails: "", active: false,
active: false, notifiySettings: {
notifiySettings: { prodID: 1,
prodID: 1, sentBlockingOrders: [{ timeStamp: "0", blockingOrder: 1 }],
sentBlockingOrders: [{ timeStamp: "0", blockingOrder: 1 }], },
}, },
}, {
{ name: "productionCheck",
name: "productionCheck", description: "Checks ppoo",
description: "Checks ppoo", checkInterval: 2,
checkInterval: 2, timeType: "hour",
timeType: "hour", emails: "",
emails: "", active: false,
active: false, notifiySettings: {
notifiySettings: { prodID: 1,
prodID: 1, count: 0,
count: 0, weekend: false,
weekend: false, locations: "0",
locations: "0", },
}, },
}, {
{ name: "stagingCheck",
name: "stagingCheck", description:
description: "Checks staging based on locations, locations need to be seperated by a ,",
"Checks staging based on locations, locations need to be seperated by a ,", checkInterval: 2,
checkInterval: 2, timeType: "hour",
timeType: "hour", emails: "",
emails: "", active: false,
active: false, notifiySettings: {
notifiySettings: { prodID: 1,
prodID: 1, count: 0,
count: 0, weekend: false,
weekend: false, locations: "0",
locations: "0", },
}, },
}, {
{ name: "tiIntergration",
name: "tiIntergration", description: "Checks for new releases to be put into ti",
description: "Checks for new releases to be put into ti", checkInterval: 60,
checkInterval: 60, timeType: "min",
timeType: "min", emails: "",
emails: "", active: false,
active: false, notifiySettings: {
notifiySettings: { prodID: 1,
prodID: 1, start: 36,
start: 36, end: 36,
end: 36, releases: [{ timeStamp: "0", releaseNumber: 1 }],
releases: [{ timeStamp: "0", releaseNumber: 1 }], },
}, },
}, {
{ name: "exampleNotification",
name: "exampleNotification", description: "Checks for new releases to be put into ti",
description: "Checks for new releases to be put into ti", checkInterval: 2,
checkInterval: 2, timeType: "min",
timeType: "min", emails: "",
emails: "", active: true,
active: true, notifiySettings: {
notifiySettings: { prodID: 1,
prodID: 1, start: 36,
start: 36, end: 36,
end: 36, releases: [1, 2, 3],
releases: [1, 2, 3], },
}, },
}, {
{ name: "fifoIndex",
name: "fifoIndex", description: "Checks for pallets that were shipped out of fifo",
description: "Checks for pallets that were shipped out of fifo", checkInterval: 1,
checkInterval: 1, timeType: "hour",
timeType: "hour", emails: "blake.matthes@alpla.com",
emails: "blake.matthes@alpla.com", active: false,
active: false, notifiySettings: {
notifiySettings: { prodID: 1,
prodID: 1, start: 36,
start: 36, end: 36,
end: 36, releases: [1, 2, 3],
releases: [1, 2, 3], },
}, },
}, {
{ name: "bow2henkelincoming",
name: "bow2henkelincoming", description:
description: "Checks for new incoming goods orders to be completed and sends an email for what truck and carrier it was",
"Checks for new incoming goods orders to be completed and sends an email for what truck and carrier it was", checkInterval: 15,
checkInterval: 15, timeType: "min",
timeType: "min", emails: "blake.matthes@alpla.com",
emails: "blake.matthes@alpla.com", active: false,
active: false, notifiySettings: { processTime: 15 },
notifiySettings: { processTime: 15 }, },
}, {
{ name: "palletsRemovedAsWaste",
name: "palletsRemovedAsWaste", description:
description: "Validates stock to make sure, there are no pallets released that have been removed as waste already ",
"Validates stock to make sure, there are no pallets released that have been removed as waste already ", checkInterval: 15,
checkInterval: 15, timeType: "min",
timeType: "min", emails: "blake.matthes@alpla.com",
emails: "blake.matthes@alpla.com", active: false,
active: false, notifiySettings: { prodID: 1 },
notifiySettings: { prodID: 1 }, },
}, {
{ name: "shortageBookings",
name: "shortageBookings", description:
description: "Checks for material shortage bookings by single av type or all types ",
"Checks for material shortage bookings by single av type or all types ", checkInterval: 15,
checkInterval: 15, timeType: "min",
timeType: "min", emails: "blake.matthes@alpla.com",
emails: "blake.matthes@alpla.com", active: false,
active: false, notifiySettings: {
notifiySettings: { time: 15,
time: 15, type: "all", // change this to something else or leave blank to use the av type
type: "all", // change this to something else or leave blank to use the av type avType: 1,
avType: 1, },
}, },
}, {
name: "tooManyErrors",
description:
"Checks to see how many errors in the last x time and sends an email based on this.",
checkInterval: 15,
timeType: "min",
emails: "blake.matthes@alpla.com",
active: true,
notifiySettings: {
errorCount: 10, // change this to something else or leave blank to use the av type
},
},
{
name: "cycleCountCheck",
description:
"Checks if a cycle count has been active for longer than the defined time.",
checkInterval: 60,
timeType: "min",
emails: "",
active: false,
notifiySettings: {
errorCount: 10, // change this to something else or leave blank to use the av type
},
},
{
name: "platToPlantEdi",
description:
"This is the plant to plant edi that will send an edi to the email once it ships, the emails will be for the receiving plants",
checkInterval: 15,
timeType: "min",
emails: "blake.matthes@alpla.com;Maritza.Hernandez@alpla.com",
active: false,
notifiySettings: { processedBol: [500], includeAll: false },
},
]; ];
export const notificationCreate = async () => { export const notificationCreate = async () => {
for (let i = 0; i < note.length; i++) { for (let i = 0; i < note.length; i++) {
try { try {
const notify = await db const notify = await db
.insert(notifications) .insert(notifications)
.values(note[i]) .values(note[i])
.onConflictDoUpdate({ .onConflictDoUpdate({
target: notifications.name, target: notifications.name,
set: { set: {
name: note[i].name, name: note[i].name,
description: note[i].description, description: note[i].description,
//notifiySettings: note[i].notifiySettings, //notifiySettings: note[i].notifiySettings,
}, },
}); });
} catch (error) { } catch (error) {
createLog( createLog(
"error", "error",
"notify", "notify",
"notify", "notify",
`There was an error getting the notifications: ${JSON.stringify( `There was an error getting the notifications: ${JSON.stringify(
error error,
)}` )}`,
); );
} }
} }
createLog( createLog(
"info", "info",
"lst", "lst",
"nofity", "nofity",
"notifications were just added/updated due to server startup" "notifications were just added/updated due to server startup",
); );
}; };

View File

@@ -0,0 +1,86 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import {
type SqlQuery,
sqlQuerySelector,
} from "../../sqlServer/utils/querySelector.utils.js";
// Inline copy of the job-disable statement, with [jobName] as the placeholder.
// NOTE(review): sqlJobCleanUp below loads "disableJob.query" through
// sqlQuerySelector instead of using this constant — confirm whether this
// template is still needed or should become the fallback when the file load fails.
const cleanUpQuery = `
DECLARE @JobName varchar(max) = '[jobName]'
UPDATE msdb.dbo.sysjobs
SET enabled = 0
WHERE name = @JobName;
`;
// SQL Agent jobs to disable — legacy jobs that have been (or are being)
// migrated into lst. Each entry must match the job name in msdb exactly.
// FIX: removed a duplicate "Monitor_PM" entry that appeared twice.
const jobNames: string[] = [
  "monitor_$_lots",
  "monitor_$_lots_2",
  "monitor$lots",
  "Monitor_APO", //listen for people to cry this is no longer a thing
  "Monitor_APO2",
  "Monitor_AutoConsumeMaterials", // TODO: migrate to lst
  "Monitor_AutoConsumeMaterials_iow1",
  "Monitor_AutoConsumeMaterials_iow2",
  "Monitor_BlockedINV_Loc",
  "monitor_inv_cycle",
  "monitor_inv_cycle_1",
  "monitor_inv_cycle_2",
  "monitor_edi_import", // TODO: migrate to lst -- for the query select count(*) from AlplaPROD_test3.dbo.T_EDIDokumente (nolock) where /* IdLieferant > 1 and */ add_date > DATEADD(MINUTE, -30, getdate())
  "Monitor_Lot_Progression",
  "Monitor_Lots", // TODO: migrate to lst -- this should be the one where we monitor the when a lot is assigned if its missing some data.
  "Monitor_MinMax", // TODO:Migrate to lst
  "Monitor_MinMax_iow2",
  "Monitor_PM",
  "Monitor_Purity",
  "monitor_wastebookings", // TODO: Migrate
  "LastPriceUpdate", // not even sure what this is
  "GETLabelsCount", // seems like an old jc job
  "jobforpuritycount", // was not even working correctly
  "Monitor_EmptyAutoConsumLocations", // not sure who uses this one
  "monitor_labelreprint", // Migrated but need to find out who really wants this
  "test", // not even sure why this is active
  "UpdateLastMoldUsed", // old jc inserts data into a table but not sure what its used for not linked to any other alert
  "UpdateWhsePositions3", // old jc inserts data into a table but not sure what its used for not linked to any other alert
  "UpdateWhsePositions4",
  "delete_print", // i think this was in here for when we was having lag prints in iowa1
  "INV_WHSE_1", // something random i wrote long time ago looks like an inv thing to see aged stuff
  "INV_WHSE_2",
  "laneAgeCheck", // another strange one thats been since moved to lst
  "monitor_blocking_2",
  "monitor_blocking", // already in lst
  "monitor_min_inv", // do we still want this one? it has a description of: this checks m-f the min inventory of materials based on the min level set in stock
  "Monitor_MixedLocations",
  "Monitor_PM2",
  "wrong_lots_1",
  "wrong_lots_2",
  "invenotry check", // spelling error one of my stupids -- must stay misspelled to match the real job name
  "monitor_hold_monitor",
  "Monitor_Silo_adjustments",
  "monitor_qualityLocMonitor", // validating with lima this is still needed
];
/**
 * Disables the legacy SQL Agent jobs listed in jobNames, one at a time.
 * The UPDATE statement is loaded from "disableJob.query" and the [jobName]
 * placeholder is substituted per job. Disabling is idempotent, so re-running
 * this on every startup is safe.
 */
export const sqlJobCleanUp = async () => {
  // running a query to disable jobs that are moved to lst to be better maintained
  const sqlQuery = sqlQuerySelector("disableJob.query") as SqlQuery;
  if (!sqlQuery.success) {
    console.log("Failed to load the query: ", sqlQuery.message);
    return;
  }
  for (const job of jobNames) {
    // Job names are hard-coded above, so plain string substitution is safe here.
    const { error } = await tryCatch(
      query(sqlQuery.query.replace("[jobName]", job), `Disabling job: ${job}`),
    );
    if (error) {
      console.log(error);
    }
  }
};

View File

@@ -0,0 +1,44 @@
{{!-- Email template: cycle counts still in progress after {{checkTime}} minutes.
     Context: checkTime (number), items (array of warehouse/location rows). --}}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
{{> styles}}
</head>
<body>
<p>All,</p>
<p>The below are cycle counts that have been in progress for longer than {{checkTime}} min(s). </p>
<table >
<thead>
<tr>
<th>WarehouseID</th>
<th>Warehouse</th>
<th>LocationID</th>
<th>Location</th>
<th>Cycle count Started</th>
<th>Started by</th>
{{!-- <th>Downtime finish</th> --}}
</tr>
</thead>
<tbody>
{{#each items}}
<tr>
<td>{{idWarehouse}}</td>
<td>{{warehouse}}</td>
<td>{{locationId}}</td>
<td>{{location}}</td>
<td>{{cycleCountStartAt}}</td>
<td>{{blockedBy}}</td>
{{!-- <td>{{dtEnd}}</td> --}}
</tr>
{{/each}}
</tbody>
</table>
<div>
<p>Thank you,</p>
<p>LST Team</p>
</div>
</body>
</html>

View File

@@ -0,0 +1,46 @@
{{!-- Email template: plant-to-plant EDI for a shipped BOL.
     Context: bol (BOL number), items (array of pallet rows). --}}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
{{> styles}}
</head>
<body>
<p>All,</p>
<p>BOL: {{bol}} was created with the below pallets.</p>
<p>Please head to stock and import the pallets via the normal incoming goods process (now/immediately).</p>
<p>When encountering a discrepancy in pallets/cages received, please correct this after the pallets have been imported.</p>
<p>Due to these being plant to plant shipments, the only way to correct this is to bring them in then undo the incoming goods process.</p>
{{!-- FIX: <br></br> is invalid (br is a void element and the stray </br> can
     render as a second break in some mail clients). --}}
<br />
<table >
<thead>
<tr>
<th>Running Number</th>
<th>AV</th>
<th>Description</th>
<th>Lot number</th>
<th>Quantity</th>
{{!-- <th>Downtime finish</th> --}}
</tr>
</thead>
<tbody>
{{#each items}}
<tr>
<td>{{runningNr}}</td>
<td>{{article}}</td>
<td>{{alias}}</td>
<td>{{lotNumber}}</td>
<td>{{qty}}</td>
{{!-- <td>{{dtEnd}}</td> --}}
</tr>
{{/each}}
</tbody>
</table>
<div>
<p>Thank you,</p>
<p>LST Team</p>
</div>
</body>
</html>

View File

@@ -0,0 +1,42 @@
{{!-- Email template: error-count alert — more than {{count}} errors in the
     last {{time}} minutes. Context: count, time, data (array of log rows). --}}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
{{> styles}}
</head>
<body>
<p>All,</p>
<p>The plant has encountered more than {{count}} errors in the last {{time}} mins, please see below errors and address as needed. </p>
<table >
<thead>
<tr>
<th>Username</th>
<th>Service</th>
<th>Message</th>
<th>Checked</th>
<th>LogTime</th>
{{!-- <th>Downtime finish</th> --}}
</tr>
</thead>
<tbody>
{{#each data}}
<tr>
<td>{{username}}</td>
<td>{{service}}</td>
<td>{{message}}</td>
<td>{{checked}}</td>
<td>{{add_Date}}</td>
{{!-- <td>{{dtEnd}}</td> --}}
</tr>
{{/each}}
</tbody>
</table>
<div>
<p>Thank you,</p>
<p>LST Team</p>
</div>
</body>
</html>

View File

@@ -7,7 +7,10 @@ import { mmQuery } from "../../../sqlServer/querys/ocp/mainMaterial.js";
export const isMainMatStaged = async (lot: any) => { export const isMainMatStaged = async (lot: any) => {
const set = serverSettings.length === 0 ? [] : serverSettings; const set = serverSettings.length === 0 ? [] : serverSettings;
// make staged false by deefault and error logged if theres an issue const checkColorSetting = set.filter((n) => n.name === "checkColor");
const checkPKGSetting = set.filter((n) => n.name === "checkPKG");
// make staged false by default and error logged if theres an issue
let isStaged = { message: "Material is staged", success: true }; let isStaged = { message: "Material is staged", success: true };
const { data, error } = (await tryCatch( const { data, error } = (await tryCatch(
@@ -43,7 +46,7 @@ export const isMainMatStaged = async (lot: any) => {
}; };
} }
// strangly the lot is not always sending over in slc so adding this in for now to see what line is cauing this issue // strangely the lot is not always sending over in slc so adding this in for now to see what line is cauing this issue
if (!lot) { if (!lot) {
createLog("info", "mainMaterial", "ocp", "No lot was passed correctly."); createLog("info", "mainMaterial", "ocp", "No lot was passed correctly.");
return isStaged; return isStaged;
@@ -125,7 +128,11 @@ export const isMainMatStaged = async (lot: any) => {
createLog("info", "mainMaterial", "ocp", `Maint material query ran.`); createLog("info", "mainMaterial", "ocp", `Maint material query ran.`);
const mainMaterial = res.find((n: any) => n.IsMainMaterial); const mainMaterial = res.find((n: any) => n.IsMainMaterial);
if (mainMaterial?.Staged === 1) {
if (
mainMaterial?.Staged === 1 &&
(checkColorSetting[0].value !== "1" || checkPKGSetting[0].value !== "1")
) {
createLog( createLog(
"info", "info",
"mainMaterial", "mainMaterial",
@@ -152,7 +159,6 @@ export const isMainMatStaged = async (lot: any) => {
// we need to filter the color stuff and then look for includes instead of a standard name. this way we can capture a everything and not a single type // we need to filter the color stuff and then look for includes instead of a standard name. this way we can capture a everything and not a single type
// for manual consume color if active to check colors // for manual consume color if active to check colors
const checkColorSetting = set.filter((n) => n.name === "checkColor");
// 2. Auto color // 2. Auto color
if (checkColorSetting[0].value === "1") { if (checkColorSetting[0].value === "1") {
@@ -162,7 +168,7 @@ export const isMainMatStaged = async (lot: any) => {
results: res, results: res,
lot, lot,
filterFn: (n) => filterFn: (n) =>
n.isManual && !n.isManual &&
!("noPKGAutoShortage" in n) && !("noPKGAutoShortage" in n) &&
!("noPKGManualShortage" in n), // pool = non-main, auto !("noPKGManualShortage" in n), // pool = non-main, auto
failCondition: (n) => n.autoConsumeCheck === "autoConsumeNOK", // column = autoConsumeCheck failCondition: (n) => n.autoConsumeCheck === "autoConsumeNOK", // column = autoConsumeCheck
@@ -202,7 +208,7 @@ export const isMainMatStaged = async (lot: any) => {
} }
// // if we want to check the packaging // // if we want to check the packaging
const checkPKGSetting = set.filter((n) => n.name === "checkPKG");
if (checkPKGSetting[0].value === "1") { if (checkPKGSetting[0].value === "1") {
const pkgAuto = checkCondition({ const pkgAuto = checkCondition({
results: res, results: res,

View File

@@ -0,0 +1,148 @@
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { serverSettings } from "../../../server/controller/settings/getSettings.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { machineCheck } from "../../../sqlServer/querys/ocp/machineId.js";
import { mmQuery } from "../../../sqlServer/querys/ocp/mainMaterial.js";
/**
 * Checks whether the main material (and, when enabled via server settings,
 * the color autoconsume material) is staged for the lot passed over from slc.
 *
 * @param lot expected shape: { lot: string, machineID: string } — TODO confirm
 * @returns { success, message } — success=true means printing may proceed.
 *
 * Fixes vs. the original:
 *  - lot validation moved BEFORE the machine lookup (lot.machineID was read
 *    before the !lot guard and would throw on a missing lot)
 *  - guard on the machine query result (data.data would throw on a failed query)
 *  - the color check used mat.find(...) — a single object — and then called
 *    .map on it, which throws at runtime; it now filters the pool and lists
 *    every NOK material
 *  - an explicit return at the end (the original fell off the end and
 *    returned undefined on the happy path through the color check)
 *  - "dose not" typos corrected in the returned/logged messages
 */
export const isMainMatStaged = async (lot: any) => {
  const set = serverSettings.length === 0 ? [] : serverSettings;
  // default to "staged" so an unexpected input issue does not block production;
  // problems are logged instead
  let isStaged = { message: "Material is staged", success: true };

  // strangely the lot is not always sending over in slc, so validate it first
  if (!lot) {
    createLog("info", "mainMaterial", "ocp", "No lot was passed correctly.");
    return isStaged;
  }
  if (typeof lot !== "object" || Array.isArray(lot)) {
    createLog(
      "info",
      "mainMaterial",
      "ocp",
      `The lot sent over is not an object: ${JSON.stringify(lot)}`,
    );
    return isStaged;
  }

  // validate the machine actually needs materials to print
  const { data, error } = (await tryCatch(
    query(
      machineCheck.replace("where Active = 1 and [Location] = [loc]", ""),
      "check machine needs mm",
    ),
  )) as any;
  // guard: a failed machine query would otherwise throw on data.data below
  if (error || !data?.data) {
    createLog("error", "mainMaterial", "ocp-system", "Machine query failed.");
    return { success: false, message: "Failed to get machine info." };
  }
  const machine = data.data.filter(
    (m: any) => m.HumanReadableId === lot.machineID,
  );
  // just in case we encounter an issue with the machines
  if (machine.length === 0) {
    createLog(
      "error",
      "mainMaterial",
      "ocp-system",
      "Invalid machine passed over.",
    );
    return {
      success: false,
      message: "Invalid machine passed over.",
    };
  }
  // we have a check on ksc side to ignore the tetra machine for now as its not updating in 2.0
  if (!machine[0].StagingMainMaterialMandatory) {
    createLog(
      "info",
      "mainMaterial",
      "ocp",
      `The machine does not require mm to print and book in.`,
    );
    return {
      message: "Machine does not require material to be staged",
      success: true,
    };
  }

  // get the materials needed for the passed over lot
  const { data: material, error: errorMat } = (await tryCatch(
    query(mmQuery.replaceAll("[lotNumber]", lot.lot), "Main Material Check"),
  )) as any;
  if (errorMat) {
    return { message: "Failed to get lot info", success: false };
  }
  const mat = material.data;
  const mainMaterial = mat.find((n: any) => n.IsMainMaterial);
  const checkColorSetting = set.filter((n) => n.name === "checkColor");
  const checkPKGSetting = set.filter((n) => n.name === "checkPKG");

  // if we only care about having the check for mm staged and dont care about
  // the extra rules we just let it fly by.
  // risk here is getting $Shortage if there really is nothing
  // NOTE(review): `||` bypasses the rules when EITHER check is off — confirm
  // this should not be `&&` (i.e. only bypass when BOTH checks are off).
  if (
    mainMaterial?.Staged === 1 &&
    (checkColorSetting[0]?.value !== "1" || checkPKGSetting[0]?.value !== "1")
  ) {
    createLog(
      "info",
      "mainMaterial",
      "ocp",
      `Main material: ${mainMaterial.MaterialHumanReadableId} - ${mainMaterial.MaterialDescription}: is staged for ${lot.lot}`,
    );
    return {
      message: `Main material: ${mainMaterial.MaterialHumanReadableId} - ${mainMaterial.MaterialDescription}: is staged for ${lot.lot}`,
      success: true,
    };
  }

  // do we have enough main material for the next pallet
  if (mainMaterial?.noMMShortage === "noMM") {
    createLog(
      "info",
      "mainMaterial",
      "ocp",
      `Main material: ${mainMaterial.MaterialHumanReadableId} - ${mainMaterial.MaterialDescription}: is not staged for ${lot.lot}`,
    );
    return {
      message: `Main material: ${mainMaterial.MaterialHumanReadableId} - ${mainMaterial.MaterialDescription}: is not staged for ${lot.lot}`,
      success: false,
    };
  }

  // do we have color to the line (autoconsume, non-manual, non-PKG pool)
  if (checkColorSetting[0]?.value === "1") {
    const colorPool = mat.filter(
      (n: any) =>
        !n.isManual &&
        !("noPKGAutoShortage" in n) &&
        !("noPKGManualShortage" in n),
    );
    const missing = colorPool.filter(
      (n: any) => n.autoConsumeCheck === "autoConsumeNOK",
    );
    if (missing.length > 0) {
      const missingList = missing
        .map(
          (o: any) => `${o.MaterialHumanReadableId} - ${o.MaterialDescription}`,
        )
        .join(",\n ");
      createLog(
        "info",
        "mainMaterial",
        "ocp",
        `lot: ${lot.lot}, is missing: ${missingList} for autoconsume`,
      );
      return {
        message: `lot: ${lot.lot}, is missing: ${missingList} for autoconsume`,
        success: false,
      };
    }
  }

  // nothing blocked the lot — report staged (original implicitly returned undefined here)
  return isStaged;
};

View File

@@ -20,84 +20,90 @@ const palletSend = new Tag("Zone_6.Ready_to_Send");
const strapperError = new Tag("Zone_3.Strapper_Faulted"); const strapperError = new Tag("Zone_3.Strapper_Faulted");
export const dycoConnect = async () => { export const dycoConnect = async () => {
// if we crash or start over reset the timers so we dont get duplicates // if we crash or start over reset the timers so we dont get duplicates
clearInterval(plcCycle); clearInterval(plcCycle);
if (isDycoRunning) if (isDycoRunning)
return { success: false, message: "Dyco is already connected." }; return { success: false, message: "Dyco is already connected." };
// Remove all listeners before adding a new one to prevent memory leaks // Remove all listeners before adding a new one to prevent memory leaks
PLC.removeAllListeners("error"); PLC.removeAllListeners("error");
try { try {
await PLC.connect(plcAddress, 0).then(async () => { await PLC.connect(plcAddress, 0).then(async () => {
createLog("info", "dyco", "ocp", `We are connected to the dyco.`); createLog("info", "dyco", "ocp", `We are connected to the dyco.`);
isDycoRunning = true; isDycoRunning = true;
plcCycle = setInterval(async () => { plcCycle = setInterval(async () => {
if (isReading) { if (isReading) {
createLog( createLog(
"debug", "debug",
"dyco", "dyco",
"ocp", "ocp",
"Skipping cycle: previous read still in progress." "Skipping cycle: previous read still in progress.",
); );
return; return;
} }
isReading = true; // Set flag isReading = true; // Set flag
try { try {
await PLC.readTag(labelerTag); await PLC.readTag(labelerTag);
await PLC.readTag(palletSend); await PLC.readTag(palletSend);
await PLC.readTag(strapperError); await PLC.readTag(strapperError);
// strapper check // strapper check
strapperFaults(strapperError); strapperFaults(strapperError);
// send the labeler tag data over // send the labeler tag data over
labelerTagRead(labelerTag); labelerTagRead(labelerTag);
// send the end of line check over. // send the end of line check over.
palletSendTag(palletSend); palletSendTag(palletSend);
} catch (error: any) { } catch (error: any) {
createLog( createLog(
"error", "error",
"dyco", "dyco",
"ocp", "ocp",
`Error reading PLC tag: ${error.message}` `Error reading PLC tag: ${error.message}`,
); );
} finally { // if we error out we want to disconnect and reconnect
isReading = false; // Reset flag closeDyco();
} setTimeout(() => {
}, plcInterval); createLog("info", "dyco", "ocp", `Reconnecting to the dyco`);
}); dycoConnect();
} catch (error) { }, 2 * 1000);
createLog( } finally {
"error", isReading = false; // Reset flag
"dyco", }
"ocp", }, plcInterval);
`There was an error in the dyco: ${error}` });
); } catch (error) {
await PLC.disconnect(); createLog(
isDycoRunning = false; "error",
} "dyco",
"ocp",
`There was an error in the dyco: ${error}`,
);
await PLC.disconnect();
isDycoRunning = false;
}
}; };
export const closeDyco = async () => { export const closeDyco = async () => {
if (!isDycoRunning) if (!isDycoRunning)
return { success: false, message: "Dyco is not connected." }; return { success: false, message: "Dyco is not connected." };
console.log(`Closing the connection`); console.log(`Closing the connection`);
try { try {
await PLC.disconnect(); await PLC.disconnect();
isDycoRunning = false; isDycoRunning = false;
return { return {
success: true, success: true,
message: "Dyco Connection is now closed.", message: "Dyco Connection is now closed.",
}; };
} catch (error) { } catch (error) {
console.log(error); console.log(error);
return { return {
success: false, success: false,
message: "There was an error closing the dyco connection.", message: "There was an error closing the dyco connection.",
}; };
} }
}; };

View File

@@ -1,29 +1,29 @@
import { createPlcMonitor } from "./plcController.js"; import { createPlcMonitor } from "./plcController.js";
export const zechettiConnect = () => { export const zechettiConnect = () => {
const config: any = { const config: any = {
controllers: [ controllers: [
{ {
id: "Zecchetti_1", id: "Zecchetti_1",
ip: "192.168.193.97", ip: "192.168.193.97",
slot: 0, slot: 0,
rpi: 250, rpi: 250,
printerId: 22, // grabbed from 2.0 printerId: 22, // grabbed from 2.0
tags: ["N7[0]"], tags: ["N7[0]"],
}, },
{ // {
id: "Zecchetti_2", // id: "Zecchetti_2",
ip: "192.168.193.111", // ip: "192.168.193.111",
slot: 0, // slot: 0,
rpi: 100, // rpi: 100,
printerId: 23, // printerId: 23,
tags: ["N8[0]"], // tags: ["N8[0]"],
}, // },
], ],
}; };
const monitor = createPlcMonitor(config); const monitor = createPlcMonitor(config);
// Start // Start
monitor.start(); monitor.start();
}; };

View File

@@ -41,7 +41,7 @@ export const qualityCycle = async () => {
message: "There was an error getting quality request data", message: "There was an error getting quality request data",
}; };
} }
const lstQData: any = data; const lstQData: any = data ?? [];
// get the pallets that currentStat is moved // get the pallets that currentStat is moved
// const res = await runQuery(palletMoveCheck, "palletCheck"); // const res = await runQuery(palletMoveCheck, "palletCheck");

View File

@@ -1,238 +1,253 @@
import { eq } from "drizzle-orm";
import sql from "mssql"; import sql from "mssql";
import { prodSqlConfig } from "./utils/prodServerConfig.js";
import { createLog } from "../logger/logger.js";
import { db } from "../../../database/dbclient.js"; import { db } from "../../../database/dbclient.js";
import { settings } from "../../../database/schema/settings.js"; import { settings } from "../../../database/schema/settings.js";
import { eq } from "drizzle-orm";
import { installed } from "../../index.js";
import { checkHostnamePort } from "../../globalUtils/pingServer.js"; import { checkHostnamePort } from "../../globalUtils/pingServer.js";
import { installed } from "../../index.js";
import { createLog } from "../logger/logger.js";
import { serverSettings } from "../server/controller/settings/getSettings.js"; import { serverSettings } from "../server/controller/settings/getSettings.js";
import { prodSqlConfig } from "./utils/prodServerConfig.js";
let pool: any; let pool: sql.ConnectionPool;
let connected: boolean = false; let connected: boolean = false;
export const initializeProdPool = async () => { export const initializeProdPool = async () => {
if (!installed) { if (!installed) {
createLog( createLog(
"info", "info",
"lst", "lst",
"sqlProd", "sqlProd",
"The server was not installed will reconnect in 5 seconds" "The server was not installed will reconnect in 5 seconds",
); );
setTimeout(() => { setTimeout(() => {
initializeProdPool(); initializeProdPool();
}, 5 * 1000); }, 5 * 1000);
return { success: false, message: "The server is not installed." }; return { success: false, message: "The server is not installed." };
} }
// const dbServer = await db // const dbServer = await db
// .select() // .select()
// .from(settings) // .from(settings)
// .where(eq(settings.name, "dbServer")); // .where(eq(settings.name, "dbServer"));
// the move to the go version for settings // the move to the go version for settings
const dbServer = serverSettings.filter( const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer" (n: any) => n.name === "dbServer",
) as any; ) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`); const serverUp = await checkHostnamePort(
`${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`,
);
if (!serverUp) { if (!serverUp) {
createLog( createLog(
"error", "error",
"lst", "lst",
"server", "server",
`The sql ${dbServer[0].value} is not reachable` `The sql ${dbServer[0].value} is not reachable`,
); );
return { // closePool()
success: false, // setTimeout(() => {
message: `The sql ${dbServer[0].value} is not reachable`, // initializeProdPool();
data: [], // }, 2*1000);
}; return {
} success: false,
message: `The sql ${dbServer[0].value} is not reachable`,
data: [],
};
}
// make sure the server is not set to localhost this will prevent some weird issues later but can be localhost on the dev // make sure the server is not set to localhost this will prevent some weird issues later but can be localhost on the dev
// const serverLoc = await db // const serverLoc = await db
// .select() // .select()
// .from(settings) // .from(settings)
// .where(eq(settings.name, "dbServer")); // .where(eq(settings.name, "dbServer"));
const serverLoc = serverSettings.filter( const serverLoc = serverSettings.filter(
(n: any) => n.name === "dbServer" (n: any) => n.name === "dbServer",
) as any; ) as any;
if ( if (
serverLoc[0].value === "localhost" && serverLoc[0].value === "localhost" &&
process.env.NODE_ENV !== "development" process.env.NODE_ENV !== "development"
) { ) {
createLog( createLog(
"error", "error",
"lst", "lst",
"sqlProd", "sqlProd",
"The server is set to localhost, and you are not in development mode." "The server is set to localhost, and you are not in development mode.",
); );
return { return {
success: false, success: false,
message: message:
"The server is set to localhost, and you are not in development mode.", "The server is set to localhost, and you are not in development mode.",
data: [], data: [],
}; };
} }
// if you were restarting from the endpoint you get this lovely error // if you were restarting from the endpoint you get this lovely error
if (connected) { if (connected) {
createLog("error", "lst", "sqlProd", "There is already a connection."); createLog("error", "lst", "sqlProd", "There is already a connection.");
return { success: false, message: "There is already a connection." }; return { success: false, message: "There is already a connection." };
} }
try { try {
const config = await prodSqlConfig(); const config = await prodSqlConfig();
pool = await sql.connect(config!); pool = new sql.ConnectionPool(config!);
await pool.connect();
createLog( createLog(
"info", "info",
"lst", "lst",
"sqlProd", "sqlProd",
`Connected to ${config?.server}, and looking at ${config?.database}` `Connected to ${config?.server}, and looking at ${config?.database}`,
); );
connected = true; connected = true;
return { return {
success: true, success: true,
message: "The sql server connection has been closed", message: "The sql server connection has been closed",
}; };
} catch (error) { } catch (error) {
createLog( createLog(
"error", "error",
"lst", "lst",
"sqlProd", "sqlProd",
`${JSON.stringify( `${JSON.stringify(error)}, "There was an error connecting to the pool."`,
error );
)}, "There was an error connecting to the pool."` // closePool()
); // setTimeout(() => {
throw new Error("There was an error closing the sql connection"); // initializeProdPool();
} // }, 2*1000);
return {
success: true,
message: "The sql server connection has been closed",
};
//throw new Error("There was an error closing the sql connection");
}
}; };
export const closePool = async () => { export const closePool = async () => {
if (!connected) { if (!connected) {
createLog( createLog(
"error", "error",
"lst", "lst",
"sqlProd", "sqlProd",
"There is no connection a connection." "There is no connection a connection.",
); );
return { success: false, message: "There is already a connection." }; return { success: false, message: "There is already a connection." };
} }
try { try {
await pool.close(); await pool.close();
createLog("info", "lst", "sqlProd", "Connection pool closed"); createLog("info", "lst", "sqlProd", "Connection pool closed");
connected = false; connected = false;
return { return {
success: true, success: true,
message: "The sql server connection has been closed", message: "The sql server connection has been closed",
}; };
} catch (error) { } catch (error) {
createLog( createLog(
"error", "error",
"lst", "lst",
"sqlProd", "sqlProd",
`${JSON.stringify( `${JSON.stringify(
error error,
)}, "There was an error closing the sql connection"` )}, "There was an error closing the sql connection"`,
); );
throw new Error("There was an error closing the sql connection"); throw new Error("There was an error closing the sql connection");
} }
}; };
export async function query(queryToRun: string, name: string) { export async function query(queryToRun: string, name: string) {
/** /**
* Just an extra catch incase someone tried to run a query while we were not connected to the server or sql server * Just an extra catch incase someone tried to run a query while we were not connected to the server or sql server
*/ */
// const dbServer = await db // const dbServer = await db
// .select() // .select()
// .from(settings) // .from(settings)
// .where(eq(settings.name, "dbServer")); // .where(eq(settings.name, "dbServer"));
const dbServer = serverSettings.filter( const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer" (n: any) => n.name === "dbServer",
) as any; ) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`); // const serverUp = await checkHostnamePort(
// `${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`,
// );
if (!serverUp) { // if (!serverUp) {
createLog( // createLog(
"error", // "error",
"lst", // "lst",
"server", // "server",
`The sql ${dbServer[0].value} is not reachable` // `Failed to run query due to ${dbServer[0].value} not being reachable.`,
); // );
return { // return {
success: false, // success: false,
message: `The sql ${dbServer[0].value} is not reachable`, // message: `Failed to run query due to ${dbServer[0].value} not being reachable.`,
data: [], // data: [],
}; // };
} // }
if (!connected) { if (!connected) {
createLog( createLog(
"error", "error",
"lst", "lst",
"server", "server",
`The sql ${dbServer[0].value} is not connected` `The sql ${dbServer[0].value} is not connected`,
); );
return {
success: false,
message: `The sql ${dbServer[0].value} is not not connected`,
data: [],
};
}
/**
* We no longer need to send over the plant token change as we do it inside the query function.
*/
// const plantToken = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "plantToken"));
const plantToken = serverSettings.filter(
(n: any) => n.name === "plantToken"
) as any;
const query = queryToRun.replaceAll("test1", plantToken[0].value);
try { return {
const result = await pool.request().query(query); success: false,
message: `The sql ${dbServer[0].value} is not not connected`,
data: [],
};
}
/**
* We no longer need to send over the plant token change as we do it inside the query function.
*/
// const plantToken = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "plantToken"));
const plantToken = serverSettings.filter(
(n: any) => n.name === "plantToken",
) as any;
const query = queryToRun.replaceAll("test1", plantToken[0].value);
return { try {
success: true, const result = await pool.request().query(query);
message: `Query results for: ${name}`,
data: result.recordset,
};
} catch (error: any) {
if (error.code === "ETIMEOUT") {
createLog(
"error",
"lst",
"sqlProd",
`${JSON.stringify(
error
)}, ${name} did not run due to a timeout.`
);
//throw new Error(`${name} query did not run due to a timeout.`);
return {
success: false,
message: `${name} query did not run due to a timeout.`,
data: [],
};
}
if (error.code === "EREQUEST") { return {
// throw new Error( success: true,
// `${name} encoutnered an error ${error.originalError.info.message}` message: `Query results for: ${name}`,
// ); data: result.recordset,
return { };
success: false, } catch (error: any) {
message: `${name} encoutnered an error ${error.originalError.info.message}`, if (error.code === "ETIMEOUT") {
data: [], createLog(
}; "error",
} "lst",
"sqlProd",
`${JSON.stringify(error)}, ${name} did not run due to a timeout.`,
);
//throw new Error(`${name} query did not run due to a timeout.`);
return {
success: false,
message: `${name} query did not run due to a timeout.`,
data: [],
};
}
//console.log(error.originalError.info.message); if (error.code === "EREQUEST") {
//EREQUEST // throw new Error(
//throw new Error(`${name} encoutnered an error ${error.code}`); // `${name} encoutnered an error ${error.originalError.info.message}`
} // );
return {
success: false,
message: `${name} encoutnered an error ${error.originalError.info.message}`,
data: [],
};
}
//console.log(error.originalError.info.message);
//EREQUEST
//throw new Error(`${name} encoutnered an error ${error.code}`);
}
} }

View File

@@ -61,7 +61,8 @@ V_Artikel.ArtikelvariantenTypBez= 'LDPE' or
V_Artikel.ArtikelvariantenTypBez= 'PP' or V_Artikel.ArtikelvariantenTypBez= 'PP' or
V_Artikel.ArtikelvariantenTypBez= 'HDPE' or V_Artikel.ArtikelvariantenTypBez= 'HDPE' or
V_Artikel.ArtikelvariantenTypBez= 'PET' or V_Artikel.ArtikelvariantenTypBez= 'PET' or
V_Artikel.ArtikelvariantenTypBez= 'PET-P' V_Artikel.ArtikelvariantenTypBez= 'PET-P' or
V_Artikel.ArtikelvariantenTypBez= 'PET-G'
THEN 'MM' THEN 'MM'
WHEN WHEN
V_Artikel.ArtikelvariantenTypBez='HDPE-Waste' or V_Artikel.ArtikelvariantenTypBez='HDPE-Waste' or

View File

@@ -1,83 +1,8 @@
export const deliveryByDateRange = ` export const deliveryByDateRange = `
use AlplaPROD_test1
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
select * from
(select (select wert from dbo.T_SystemParameter where Bezeichnung = 'Werkskuerzel') as Plant,
AuftragsNummer as OrderNumber,
PositionsNummer as CustomerLineNumber,
AbrufNummer as CustomerReleaseNumber,
CONVERT(date, AbrufLiefertermin) as DeliveryDate,
CONVERT(DATE,JournalDatum) Bol_PrintDate,
AbrufMenge AS OrderQuantity,
AbrufMengeVPK as OrderPallets,
GelieferteMenge AS DeliveredQTY,
GelieferteMengeVPK as DeliverdPallets,
JournalNummer as BOLNum,
ProdArtikelBez AS ProductFamily,
dbo.V_LadePlanungenLadeAuftragAbruf.AbrufIdKundenAdresse AS IdCustomer,
dbo.V_LadePlanungenLadeAuftragAbruf.AdressBez AS CustName,
dbo.T_EAIJournal.IdJournalStatus as bolStatus,
V_TrackerAuftragsAbrufe.IdAuftragsAbruf as releaseNum,
V_LadePlanungenLadeAuftragAbruf.IdLadeAuftrag as truckPostion
,dbo.V_TrackerAuftragsAbrufe.IdArtikelvarianten as av
,dbo.V_TrackerAuftragsAbrufe.ArtikelVariantenAlias as alias
,'Base Plant' as plantType
from dbo.V_TrackerAuftragsAbrufe (nolock)
left join
dbo.V_LadePlanungenLadeAuftragAbruf on V_TrackerAuftragsAbrufe.IdAuftragsAbruf =
dbo.V_LadePlanungenLadeAuftragAbruf.AbrufIdAuftragsAbruf
left join
dbo.T_EAIJournal on dbo.V_LadePlanungenLadeAuftragAbruf.IdLadeAuftrag =
dbo.T_EAIJournal.IdLadeAuftrag
left join
dbo.V_ArtikelKomplett on V_TrackerAuftragsAbrufe.IdArtikelVarianten =
dbo.V_ArtikelKomplett.IdArtikelvarianten
where GelieferteMengeVPK > 0 AND (
AbrufLiefertermin IS NULL
OR CONVERT(date, JournalDatum) BETWEEN @StartDate AND @EndDate
)
/*in house*/
union all
select top (50) (select wert from dbo.T_SystemParameter where Bezeichnung = 'Werkskuerzel') as Plant
,[KundenAuftragsNummer] as OrderNumber
,[KundenPositionsNummer] as CustomerLineNumber
, null as CustomerReleaseNumber
,CONVERT(date, i.Add_Date) as DeliveryDate
,CONVERT(DATE,i.Upd_Date) Bol_PrintDate
,null AS OrderQuantity
,null as OrderPallets
,LieferMengeVereinbart AS DeliveredQTY
,null as DeliverdPallets
,JournalNummer as BOLNum
,null AS ProductFamily
,IdAdresse AS IdCustomer
,null AS CustName
,null as bolStatus
,null as releaseNum
,null as truckPostion
,i.IdArtikelVariante as av
,null as alias
,'In-House' as plantType
--,*
from [dbo].[T_InhouseLieferungen] as i (nolock)
where CONVERT(date, Upd_Date) BETWEEN @StartDate AND @EndDate
) x
order by Bol_PrintDate desc
`;
export const deliveryByDateRangeAndAv = `
use [test1_AlplaPROD2.0_Read] use [test1_AlplaPROD2.0_Read]
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
SELECT SELECT
r.[ArticleHumanReadableId] r.[ArticleHumanReadableId]
,[ReleaseNumber] ,[ReleaseNumber]
@@ -86,12 +11,15 @@ r.[ArticleHumanReadableId]
,[CustomerReleaseNumber] ,[CustomerReleaseNumber]
,[ReleaseState] ,[ReleaseState]
,[DeliveryState] ,[DeliveryState]
,ea.JournalNummer ,ea.JournalNummer as BOL_Number
,[ReleaseConfirmationState] ,[ReleaseConfirmationState]
,[PlanningState] ,[PlanningState]
,format(r.[OrderDate], 'yyyy-MM-dd HH:mm') as OrderDate --,format(r.[OrderDate], 'yyyy-MM-dd HH:mm') as OrderDate
,FORMAT(r.[DeliveryDate], 'yyyy-MM-dd HH:mm') as DeliveryDate ,r.[OrderDate]
,FORMAT(r.[LoadingDate], 'yyyy-MM-dd HH:mm') as LoadingDate --,FORMAT(r.[DeliveryDate], 'yyyy-MM-dd HH:mm') as DeliveryDate
,r.[DeliveryDate]
--,FORMAT(r.[LoadingDate], 'yyyy-MM-dd HH:mm') as LoadingDate
,r.[LoadingDate]
,[Quantity] ,[Quantity]
,[DeliveredQuantity] ,[DeliveredQuantity]
,r.[AdditionalInformation1] ,r.[AdditionalInformation1]
@@ -108,6 +36,161 @@ r.[ArticleHumanReadableId]
,[Irradiated] ,[Irradiated]
,r.[CreatedByEdi] ,r.[CreatedByEdi]
,[DeliveryAddressHumanReadableId] ,[DeliveryAddressHumanReadableId]
,DeliveryAddressDescription
,[CustomerArtNo]
,[TotalPrice]
,r.[ArticleAlias]
FROM [order].[Release] (nolock) as r
left join
[order].LineItem as x on
r.LineItemId = x.id
left join
[order].Header as h on
x.HeaderId = h.id
--bol stuff
left join
AlplaPROD_test1.dbo.V_LadePlanungenLadeAuftragAbruf (nolock) as zz
on zz.AbrufIdAuftragsAbruf = r.ReleaseNumber
left join
(select * from (SELECT
ROW_NUMBER() OVER (PARTITION BY IdJournal ORDER BY add_date DESC) AS RowNum
,*
FROM [AlplaPROD_test1].[dbo].[T_Lieferungen] (nolock)) x
where RowNum = 1) as ea on
zz.IdLieferschein = ea.IdJournal
where
--r.ArticleHumanReadableId in ([articles])
--r.ReleaseNumber = 1452
r.DeliveryDate between @StartDate AND @EndDate
and DeliveredQuantity > 0
--and Journalnummer = 169386
`;
// export const deliveryByDateRange = `
// use AlplaPROD_test1
// DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
// DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
// select * from
// (select (select wert from dbo.T_SystemParameter where Bezeichnung = 'Werkskuerzel') as Plant,
// AuftragsNummer as OrderNumber,
// PositionsNummer as CustomerLineNumber,
// AbrufNummer as CustomerReleaseNumber,
// CONVERT(date, AbrufLiefertermin) as DeliveryDate,
// CONVERT(DATE,JournalDatum) Bol_PrintDate,
// AbrufMenge AS OrderQuantity,
// AbrufMengeVPK as OrderPallets,
// GelieferteMenge AS DeliveredQTY,
// GelieferteMengeVPK as DeliverdPallets,
// JournalNummer as BOLNum,
// ProdArtikelBez AS ProductFamily,
// dbo.V_LadePlanungenLadeAuftragAbruf.AbrufIdKundenAdresse AS IdCustomer,
// dbo.V_LadePlanungenLadeAuftragAbruf.AdressBez AS CustName,
// dbo.T_EAIJournal.IdJournalStatus as bolStatus,
// V_TrackerAuftragsAbrufe.IdAuftragsAbruf as releaseNum,
// V_LadePlanungenLadeAuftragAbruf.IdLadeAuftrag as truckPostion
// ,dbo.V_TrackerAuftragsAbrufe.IdArtikelvarianten as av
// ,dbo.V_TrackerAuftragsAbrufe.ArtikelVariantenAlias as alias
// ,'Base Plant' as plantType
// from dbo.V_TrackerAuftragsAbrufe (nolock)
// left join
// dbo.V_LadePlanungenLadeAuftragAbruf on V_TrackerAuftragsAbrufe.IdAuftragsAbruf =
// dbo.V_LadePlanungenLadeAuftragAbruf.AbrufIdAuftragsAbruf
// left join
// dbo.T_EAIJournal on dbo.V_LadePlanungenLadeAuftragAbruf.IdLadeAuftrag =
// dbo.T_EAIJournal.IdLadeAuftrag
// left join
// dbo.V_ArtikelKomplett on V_TrackerAuftragsAbrufe.IdArtikelVarianten =
// dbo.V_ArtikelKomplett.IdArtikelvarianten
// where GelieferteMengeVPK > 0 AND (
// AbrufLiefertermin IS NULL
// OR CONVERT(date, JournalDatum) BETWEEN @StartDate AND @EndDate
// )
// /*in house*/
// union all
// select top (50) (select wert from dbo.T_SystemParameter where Bezeichnung = 'Werkskuerzel') as Plant
// ,[KundenAuftragsNummer] as OrderNumber
// ,[KundenPositionsNummer] as CustomerLineNumber
// , null as CustomerReleaseNumber
// ,CONVERT(date, i.Add_Date) as DeliveryDate
// ,CONVERT(DATE,i.Upd_Date) Bol_PrintDate
// ,null AS OrderQuantity
// ,null as OrderPallets
// ,LieferMengeVereinbart AS DeliveredQTY
// ,null as DeliverdPallets
// ,JournalNummer as BOLNum
// ,null AS ProductFamily
// ,IdAdresse AS IdCustomer
// ,null AS CustName
// ,null as bolStatus
// ,null as releaseNum
// ,null as truckPostion
// ,i.IdArtikelVariante as av
// ,null as alias
// ,'In-House' as plantType
// --,*
// from [dbo].[T_InhouseLieferungen] as i (nolock)
// where CONVERT(date, Upd_Date) BETWEEN @StartDate AND @EndDate
// ) x
// order by Bol_PrintDate desc
// `;
export const deliveryByDateRangeAndAv = `
use [test1_AlplaPROD2.0_Read]
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
SELECT
r.[ArticleHumanReadableId]
,[ReleaseNumber]
,h.CustomerOrderNumber
,x.CustomerLineItemNumber
,[CustomerReleaseNumber]
,[ReleaseState]
,[DeliveryState]
,ea.JournalNummer as BOL_Number
,[ReleaseConfirmationState]
,[PlanningState]
--,format(r.[OrderDate], 'yyyy-MM-dd HH:mm') as OrderDate
,r.[OrderDate]
--,FORMAT(r.[DeliveryDate], 'yyyy-MM-dd HH:mm') as DeliveryDate
,r.[DeliveryDate]
--,FORMAT(r.[LoadingDate], 'yyyy-MM-dd HH:mm') as LoadingDate
,r.[LoadingDate]
,[Quantity]
,[DeliveredQuantity]
,r.[AdditionalInformation1]
,r.[AdditionalInformation2]
,[TradeUnits]
,[LoadingUnits]
,[Trucks]
,[LoadingToleranceType]
,[SalesPrice]
,[Currency]
,[QuantityUnit]
,[SalesPriceRemark]
,r.[Remark]
,[Irradiated]
,r.[CreatedByEdi]
,[DeliveryAddressHumanReadableId]
,DeliveryAddressDescription
,[CustomerArtNo] ,[CustomerArtNo]
,[TotalPrice] ,[TotalPrice]
,r.[ArticleAlias] ,r.[ArticleAlias]
@@ -141,8 +224,8 @@ where
r.ArticleHumanReadableId in ([articles]) r.ArticleHumanReadableId in ([articles])
--r.ReleaseNumber = 1452 --r.ReleaseNumber = 1452
and r.DeliveryDate between '[startDate]' and '[endDate]' and r.DeliveryDate between @StartDate AND @EndDate
--and DeliveredQuantity > 0
order by DeliveryDate desc --and Journalnummer = 169386
`; `;

View File

@@ -1,5 +1,5 @@
export const orderState = ` export const orderState = `
SELECT top(10000) SELECT
CustomerOrderNumber CustomerOrderNumber
,r.CustomerReleaseNumber ,r.CustomerReleaseNumber
, OrderState , OrderState
@@ -21,6 +21,6 @@ CustomerOrderNumber
where where
--h.CreatedByEdi = 1 --h.CreatedByEdi = 1
r.ReleaseState > 0 r.ReleaseState >= 1
--and CustomerOrderNumber in ( '2358392') --and CustomerOrderNumber in ( '2358392')
`; `;

View File

@@ -0,0 +1,14 @@
use [test1_AlplaPROD2.0_Read]
select
humanreadableId as addressId
,ContactEmail as email
,ContactName
,ContactPhoneNumber
,Name
,Street
,City
,ZipCode
--,*
from [masterData].[Address] (nolock)
where humanreadableid = [customerAddress]

View File

@@ -0,0 +1,43 @@
use AlplaPROD_test1
/**
check if we have any new alpla bols that were created
*/
SELECT
x.idladeplanung
,e.idjournal
,e.journalNummer as bol
,e.idjournalstatus
,e.ladeDatum as loadDate
,e.bemerkung
,e.ereporting_idwerk
,e.journalDatum
,a.idadressen as addressId
,a.bezeichnung as addressDescription
,a.strasse as streetAddress
,a.ort as cityState
,a.plz as zipcode
,idauftrag as releaseNumber
--,*
FROM [dbo].[T_EAIJournal] as e with (nolock)
-- pull in the address so we only pull in florence data
left join
[dbo].[T_EAIJournalAdresse] as a with (nolock) on
a.[IdJournalAdresse] = [IdJournalKundenAdresse]
-- get the table to link the pallets to the bol
left join
[dbo].[T_EAIJournalPosition] as x with (nolock) on
x.idjournal = e.idjournal
where idjournalStatus = 62
--and idadressen = 270
and a.bezeichnung like '%Alpla%' -- we only want to monitor for addresses that are linked to alpla.
and JournalDatum > DATEADD(MINUTE, -[timeCheck], GETDATE())
and e.journalNummer not in ([ignoreBols])
and idauftrag > 1 -- this will ignore all incoming goodsv as we are really only looking for outbound deliveries
order by JournalDatum desc

View File

@@ -0,0 +1,33 @@
/*
checks the age of an inventory dose not exceed x time
*/
use AlplaPROD_test1
DECLARE @timeCheck INT = [timeTest]
select
w.IdWarenLager as idWarehouse
,w.KurzBezeichnung as warehouse
,b.IdLagerAbteilung as locationId
,x.KurzBezeichnung as 'location'
--,case when b.upd_date < Dateadd(minute, -(@timeCheck * 1.5), getdate()) then 'OVERDUE' else 'In-Progress' end as invStatus
,format(b.Upd_Date, 'M/d/yyyy HH:mm') as cycleCountStartAt
,b.Upd_User as blockedBy
--,*
from [dbo].[V_LagerAbteilungenInventuren] (nolock) as b
-- get the loction name
left join
dbo.T_LagerAbteilungen (nolock) as x
on x.IdLagerAbteilung = b.IdLagerAbteilung
-- get the whse
left join
dbo.T_WarenLager (nolock) as w
on x.idWarenLager = w.idWarenLager
where status = 1
and b.Upd_Date < Dateadd(minute, -@timeCheck, getdate())

View File

@@ -0,0 +1,8 @@
/*
disables sql jobs.
*/
EXEC msdb.dbo.sp_update_job @job_name = N'[jobName]', @enabled = 0;
-- DECLARE @JobName varchar(max) = '[jobName]'
-- UPDATE msdb.dbo.sysjobs
-- SET enabled = 0
-- WHERE name = @JobName;

View File

@@ -0,0 +1,78 @@
/*
This query will return a single running number as long as its in stock.
To get all data comment out the lfdnr in the where statmen
*/
use AlplaPROD_test1
DECLARE @runningNumber nvarchar(max) = '[runningNr]' -- when saving in lst should be '[runningNr]'
select x.idartikelVarianten as av,
ArtikelVariantenAlias as alias,
x.Lfdnr as runningNumber,
round(sum(EinlagerungsMengeVPKSum),0) as totalPallets,
sum(EinlagerungsMengeSum) as totalPalletQTY,
round(sum(VerfuegbareMengeVPKSum),0) as avaliblePallets,
sum(VerfuegbareMengeSum) as avaliablePalletQTY,
sum(case when c.Description LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as coaPallets,
sum(case when c.Description LIKE '%COA%' then GesperrteMengeSum else 0 end) as coaQTY,
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeVPKSum else 0 end) as heldPallets,
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeSum else 0 end) as heldQTY
,IdProdPlanung as lot
,IdAdressen as addressID,
x.AdressBez as addressDescription
,x.IdLagerAbteilung as locationId
,x.lagerabteilungkurzbez as location
,lot.machine
,produktionsdatummin as productionDate
,'728'
+ RIGHT(CAST(YEAR(produktionsdatummin) AS varchar(4)), 1)
+ CAST(DATEDIFF(DAY, DATEFROMPARTS(YEAR(produktionsdatummin), 1, 1), produktionsdatummin) + 1 AS varchar(3))
+ CAST(lot.machine AS varchar(10)) as batch
,c.Description as blockingReason
,x.Barcode as barcode
--,*
from dbo.[V_LagerPositionenBarcodes] (nolock) x
left join
dbo.T_EtikettenGedruckt as l(nolock) on
x.Lfdnr = l.Lfdnr AND l.Lfdnr > 1
left join
(SELECT *
FROM [dbo].[T_BlockingDefects] where Active = 1) as c
on x.IdMainDefect = c.IdBlockingDefect
/*
get lot and machine info
*/
left join
(select location as machine,
runningnumber as lot
,planstart
,planend
from [test1_AlplaPROD2.0_Read].[productionScheduling].[ProductionLot] (nolock) x
left join
[test1_AlplaPROD2.0_Read].[masterData].[Machine] (nolock) m on
m.id = x.machineid) as lot on
lot.lot = IdProdPlanung
/*
The data below will be controlled by the user in excel by default everything will be passed over
IdAdressen = 3
*/
where IdArtikelTyp = 1
and x.IdWarenlager in (1) -- the pallet must be in ppoo
and x.Lfdnr = @runningNumber -- comment this out when you want to get everything
group by x.idartikelVarianten, ArtikelVariantenAlias, c.Description, IdAdressen,
x.AdressBez , x.Lfdnr,
IdProdPlanung
,x.IdLagerAbteilung
,x.lagerabteilungkurzbez
,lot.machine
,produktionsdatummin
,x.Barcode
order by x.IdArtikelVarianten

View File

@@ -0,0 +1,37 @@
use AlplaPROD_test1
select * from (SELECT
p.[IdLadePlanung]
,p.[Beleg] as lotNumber
,p.[LfdNrJeArtikelKunde] as runningNr
,p.[Barcode]
,p.[ProduktionsDatum] as productionDate
,p.[Add_User] as scanDate
,p.[Add_Date]
,p.[Upd_User]
,p.[Upd_Date]
,p.[IdJournalWarenPosition]
,p.[LieferMenge] as qty
-- ,av.IdArtikelvarianten as article
-- ,av.Bezeichnung as alias
,av.articlehumanreadableid as article
,av.ArticleDescription as alias
--,[SSCC_ReserveZiffer]
--,ROW_NUMBER() OVER (PARTITION BY p.[LfdNrJeArtikelKunde] ORDER BY p.upd_date DESC) AS RowNum
--,*
FROM [dbo].[T_EAIJournalLieferPosition] as p (nolock)
-- left join
-- dbo.T_ProdPlanung as l on
-- l.IdProdPlanung = p.Beleg
left join
[test1_AlplaPROD2.0_Read].labelling.InternalLabel as av on
av.RunningNumber = p.[LfdNrJeArtikelKunde]
) as a
where idladeplanung in ([palLinkedToBol])
--and RowNum = 1
order by runningNr

View File

@@ -157,3 +157,259 @@ where lot.ProductionLotHumanReadableId = @lot and MaterialDescription not like '
and MaterialDescription NOT LIKE '%bb%' and MaterialDescription NOT LIKE '%bb%'
and MaterialDescription NOT LIKE '%mcg%' and MaterialDescription NOT LIKE '%mcg%'
`; `;
export const something = [
{
MaterialHumanReadableId: 98,
MaterialDescription: "BAN Banding 51544 1cyc",
Staged: 0,
isManual: false,
IsMainMaterial: false,
TotalPlannedLoadingUnits: 88,
TotalProducedLoadingUnits: 0,
remainingPallets: 88,
Provided: 0,
consumption: 0,
totalDemand: 352,
totalNeeded: 66.2904,
noMMShortage: null,
noPKGAutoShortage: "pkgAutoGood",
noPKGManualShortage: "noManPkg",
noManualShortage: null,
autoConsumeCheck: "autoConsumeOk",
invForAutoConsume: 1314200,
Percentage: 0,
QuantityPosition: 4,
"": null,
},
{
MaterialHumanReadableId: 174,
MaterialDescription: "MB PE Ampacet BW 11744",
Staged: 0,
isManual: true,
IsMainMaterial: false,
TotalPlannedLoadingUnits: 88,
TotalProducedLoadingUnits: 0,
remainingPallets: 88,
Provided: 0,
consumption: 0,
totalDemand: 162.728623,
totalNeeded: 1.814699,
noMMShortage: null,
noPKGAutoShortage: null,
noPKGManualShortage: null,
noManualShortage: "noOK",
autoConsumeCheck: "autoConsumeNOK",
invForAutoConsume: null,
Percentage: 2.19,
QuantityPosition: null,
"": 0,
},
{
MaterialHumanReadableId: 99,
MaterialDescription: "TOP Plastic 6040643 44x56x4 w/o CB 30cyc",
Staged: 0,
isManual: false,
IsMainMaterial: false,
TotalPlannedLoadingUnits: 88,
TotalProducedLoadingUnits: 0,
remainingPallets: 88,
Provided: 0,
consumption: 0,
totalDemand: 88,
totalNeeded: 66.2904,
noMMShortage: null,
noPKGAutoShortage: "pkgAutoGood",
noPKGManualShortage: "noManPkg",
noManualShortage: null,
autoConsumeCheck: "autoConsumeOk",
invForAutoConsume: 2048,
Percentage: 0,
QuantityPosition: 1,
"": null,
},
{
MaterialHumanReadableId: 119,
MaterialDescription: "MM HDPE PCR KW Plastics KWR 101-150",
Staged: 1,
isManual: false,
IsMainMaterial: false,
TotalPlannedLoadingUnits: 88,
TotalProducedLoadingUnits: 0,
remainingPallets: 88,
Provided: 53643.717,
consumption: 0,
totalDemand: 3744.977905,
totalNeeded: 41.762952,
noMMShortage: null,
noPKGAutoShortage: null,
noPKGManualShortage: null,
noManualShortage: null,
autoConsumeCheck: "autoConsumeOk",
invForAutoConsume: 53754.112,
Percentage: 50.4,
QuantityPosition: null,
"": 0,
},
{
MaterialHumanReadableId: 504,
MaterialDescription: "LBL IML Label F 1.8L Evolution 1265677",
Staged: 0,
isManual: false,
IsMainMaterial: false,
TotalPlannedLoadingUnits: 88,
TotalProducedLoadingUnits: 0,
remainingPallets: 88,
Provided: 0,
consumption: 0,
totalDemand: 79357.090909,
totalNeeded: 1,
noMMShortage: null,
noPKGAutoShortage: null,
noPKGManualShortage: null,
noManualShortage: null,
autoConsumeCheck: "autoConsumeOk",
invForAutoConsume: 900500,
Percentage: 0,
QuantityPosition: null,
"": 1,
},
{
MaterialHumanReadableId: 176,
MaterialDescription: "MM HDPE Dow DMDF 6230",
Staged: 1,
isManual: false,
IsMainMaterial: true,
TotalPlannedLoadingUnits: 88,
TotalProducedLoadingUnits: 0,
remainingPallets: 88,
Provided: 74063.734,
consumption: 0,
totalDemand: 3522.805744,
totalNeeded: 39.285348,
noMMShortage: "mmGood",
noPKGAutoShortage: null,
noPKGManualShortage: null,
noManualShortage: null,
autoConsumeCheck: "autoConsumeOk",
invForAutoConsume: 182624.771,
Percentage: 47.41,
QuantityPosition: null,
"": 0,
},
{
MaterialHumanReadableId: 397,
MaterialDescription: "STW Film 20x45ga 180567",
Staged: 0,
isManual: false,
IsMainMaterial: false,
TotalPlannedLoadingUnits: 88,
TotalProducedLoadingUnits: 0,
remainingPallets: 88,
Provided: 0,
consumption: 0,
totalDemand: 17.6,
totalNeeded: 66.2904,
noMMShortage: null,
noPKGAutoShortage: "pkgAutoGood",
noPKGManualShortage: "noManPkg",
noManualShortage: null,
autoConsumeCheck: "autoConsumeOk",
invForAutoConsume: 1063.92,
Percentage: 0,
QuantityPosition: 0.2,
"": null,
},
{
MaterialHumanReadableId: 96,
MaterialDescription: "PAL PRA 44x56x5 50cyc",
Staged: 0,
isManual: false,
IsMainMaterial: false,
TotalPlannedLoadingUnits: 88,
TotalProducedLoadingUnits: 0,
remainingPallets: 88,
Provided: 0,
consumption: 0,
totalDemand: 88,
totalNeeded: 66.2904,
noMMShortage: null,
noPKGAutoShortage: "pkgAutoGood",
noPKGManualShortage: "noManPkg",
noManualShortage: null,
autoConsumeCheck: "autoConsumeOk",
invForAutoConsume: 1529,
Percentage: 0,
QuantityPosition: 1,
"": null,
},
{
MaterialHumanReadableId: 505,
MaterialDescription: "LBL IML Label B 1.8L Evolution 1265678",
Staged: 0,
isManual: false,
IsMainMaterial: false,
TotalPlannedLoadingUnits: 88,
TotalProducedLoadingUnits: 0,
remainingPallets: 88,
Provided: 0,
consumption: 0,
totalDemand: 79357.090909,
totalNeeded: 1,
noMMShortage: null,
noPKGAutoShortage: null,
noPKGManualShortage: null,
noManualShortage: null,
autoConsumeCheck: "autoConsumeOk",
invForAutoConsume: 903000,
Percentage: 0,
QuantityPosition: null,
"": 1,
},
{
MaterialHumanReadableId: 97,
MaterialDescription: "SSH Plastic 48100349 44x56x0.06 30cyc",
Staged: 0,
isManual: false,
IsMainMaterial: false,
TotalPlannedLoadingUnits: 88,
TotalProducedLoadingUnits: 0,
remainingPallets: 88,
Provided: 0,
consumption: 0,
totalDemand: 792,
totalNeeded: 66.2904,
noMMShortage: null,
noPKGAutoShortage: "pkgAutoGood",
noPKGManualShortage: "noManPkg",
noManualShortage: null,
autoConsumeCheck: "autoConsumeOk",
invForAutoConsume: 29962,
Percentage: 0,
QuantityPosition: 9,
"": null,
},
{
MaterialHumanReadableId: 169,
MaterialDescription: "LBL Label 4x6 white 9396",
Staged: 0,
isManual: false,
IsMainMaterial: false,
TotalPlannedLoadingUnits: 88,
TotalProducedLoadingUnits: 0,
remainingPallets: 88,
Provided: 0,
consumption: 0,
totalDemand: 264,
totalNeeded: 66.2904,
noMMShortage: null,
noPKGAutoShortage: "pkgAutoGood",
noPKGManualShortage: "noManPkg",
noManualShortage: null,
autoConsumeCheck: "autoConsumeOk",
invForAutoConsume: 55637,
Percentage: 0,
QuantityPosition: 3,
"": null,
},
];

View File

@@ -1,5 +1,5 @@
export const forecastData = ` export const forecastData = `
SELECT format(cast(RequirementDate as date),'M/d/yyyy') as requirementDate SELECT RequirementDate as requirementDate
,ArticleHumanReadableId ,ArticleHumanReadableId
,CustomerArticleNumber ,CustomerArticleNumber
,ArticleDescription ,ArticleDescription

View File

@@ -1,34 +1,34 @@
export const planningNumbersByAVDate = ` export const planningNumbersByAVDate = `
use AlplaPROD_test1 use AlplaPROD_test1
declare @start_date nvarchar(30) = '[startDate]' --'2025-01-01' declare @start_date nvarchar(30) = '[startDate]' --'2025-01-01'
declare @end_date nvarchar(30) = '[endDate]' --'2025-08-09' declare @end_date nvarchar(30) = '[endDate]' --'2025-08-09'
/* /*
articles will need to be passed over as well as the date structure we want to see articles will need to be passed over as well as the date structure we want to see
*/ */
select x.IdArtikelvarianten As Article, select x.IdArtikelvarianten As Article,
ProduktionAlias as Description, ProduktionAlias as Description,
standort as MachineId, standort as MachineId,
MaschinenBezeichnung as MachineName, MaschinenBezeichnung as MachineName,
--MaschZyklus as PlanningCycleTime, --MaschZyklus as PlanningCycleTime,
x.IdProdPlanung as LotNumber, x.IdProdPlanung as LotNumber,
FORMAT(ProdTag, 'MM/dd/yyyy') as ProductionDay, FORMAT(ProdTag, 'MM/dd/yyyy') as ProductionDay,
x.planMenge as TotalPlanned, x.planMenge as TotalPlanned,
ProduktionMenge as QTYPerDay, ProduktionMenge as QTYPerDay,
round(ProduktionMengeVPK, 2) PalDay, round(ProduktionMengeVPK, 2) PalDay,
Status as finished Status as finished
--MaschStdAuslastung as nee --MaschStdAuslastung as nee
from dbo.V_ProdLosProduktionJeProdTag_PLANNING (nolock) as x from dbo.V_ProdLosProduktionJeProdTag_PLANNING (nolock) as x
left join left join
dbo.V_ProdPlanung (nolock) as p on dbo.V_ProdPlanung (nolock) as p on
x.IdProdPlanung = p.IdProdPlanung x.IdProdPlanung = p.IdProdPlanung
where ProdTag between @start_date and @end_date where ProdTag between @start_date and @end_date
and p.IdArtikelvarianten in ([articles]) and p.IdArtikelvarianten in ([articles])
--and V_ProdLosProduktionJeProdTag_PLANNING.IdKunde = 10 --and V_ProdLosProduktionJeProdTag_PLANNING.IdKunde = 10
--and IdProdPlanung = 18442 --and IdProdPlanung = 18442
order by ProdTag desc order by ProdTag desc
`; `;

View File

@@ -29,7 +29,7 @@ left join
alplaprod_test1.dbo.V_LagerPositionenBarcodes (nolock) as l on alplaprod_test1.dbo.V_LagerPositionenBarcodes (nolock) as l on
ext.RunningNumber = l.Lfdnr ext.RunningNumber = l.Lfdnr
WHERE ext.SsccEanRunningNumber IN (@runningNumber) and WHERE ext.RunningNumber IN (@runningNumber) and
ext.RunningNumber NOT IN ( ext.RunningNumber NOT IN (
SELECT RunningNumber FROM [test1_AlplaPROD2.0_Read].[labelling].[InternalLabel] WHERE RunningNumber IN (@runningNumber) SELECT RunningNumber FROM [test1_AlplaPROD2.0_Read].[labelling].[InternalLabel] WHERE RunningNumber IN (@runningNumber)
) )

View File

@@ -4,48 +4,47 @@ import { createLog } from "../../logger/logger.js";
import { serverSettings } from "../../server/controller/settings/getSettings.js"; import { serverSettings } from "../../server/controller/settings/getSettings.js";
export const prodSqlConfig = async () => { export const prodSqlConfig = async () => {
try { try {
//const serverSetting = await db.select().from(settings); //const serverSetting = await db.select().from(settings);
const serverSetting = serverSettings as any; const serverSetting = serverSettings as any;
// create dummy type data // create dummy type data
const server = serverSetting.filter((s: any) => s.name === "dbServer"); const server = serverSetting.filter((s: any) => s.name === "dbServer");
const plantToken = serverSetting.filter( const plantToken = serverSetting.filter(
(s: any) => s.name === "plantToken" (s: any) => s.name === "plantToken",
); );
const dbUser = serverSetting.filter((s: any) => s.name === "dbUser"); const dbUser = serverSetting.filter((s: any) => s.name === "dbUser");
// if erroring out double check the password was actually encoded before saving // if erroring out double check the password was actually encoded before saving
const dbPassword = serverSetting.filter( const dbPassword = serverSetting.filter((s: any) => s.name === "dbPass");
(s: any) => s.name === "dbPass"
);
const sqlConfig = { const sqlConfig = {
server: server[0].value, server:
database: `AlplaPROD_${plantToken[0].value}_cus`, process.env.NODE_ENV !== "development" ? "localhost" : server[0].value,
user: dbUser[0].value, database: `AlplaPROD_${plantToken[0].value}_cus`,
password: atob(dbPassword[0].value), user: dbUser[0].value,
options: { password: atob(dbPassword[0].value),
encrypt: true, options: {
trustServerCertificate: true, encrypt: true,
}, trustServerCertificate: true,
requestTimeout: 90000, // in milliseconds },
pool: { requestTimeout: 90000, // in milliseconds
max: 20, // Maximum number of connections in the pool pool: {
min: 0, // Minimum number of connections in the pool max: 20, // Maximum number of connections in the pool
idleTimeoutMillis: 10000, // How long a connection is allowed to be idle before being released min: 0, // Minimum number of connections in the pool
reapIntervalMillis: 1000, // how often to check for idle resourses to destory idleTimeoutMillis: 10000, // How long a connection is allowed to be idle before being released
acquireTimeoutMillis: 100000, // How long until a complete timeout happens reapIntervalMillis: 1000, // how often to check for idle resourses to destory
}, acquireTimeoutMillis: 100000, // How long until a complete timeout happens
}; },
};
return sqlConfig; return sqlConfig;
} catch (error) { } catch (error) {
createLog( createLog(
"info", "info",
"lst", "lst",
"sqlProd", "sqlProd",
`${JSON.stringify( `${JSON.stringify(
error error,
)} "There was an error getting/setting up the config for the prod sql server."` )} "There was an error getting/setting up the config for the prod sql server."`,
); );
} }
}; };

View File

@@ -0,0 +1,28 @@
import { readFileSync } from "fs";
// Result shape for sqlQuerySelector: the raw SQL text plus a success flag
// and a human-readable status message for the caller.
export type SqlQuery = {
  query: string; // raw SQL text loaded from disk. NOTE(review): the failure branch of sqlQuerySelector currently omits this field — confirm/fix.
  success: boolean; // true when the .sql file was found and read
  message: string; // status or error description
};
/**
 * Load a named SQL query file from the querys/newQueries folder.
 *
 * @param {string} name - base filename of the query, without the ".sql"
 *   extension. NOTE(review): the name is interpolated straight into a file
 *   path, so only internal, trusted names should ever reach this function.
 * @returns {{ query: string, success: boolean, message: string }} on success
 *   `query` holds the file contents; on failure `query` is "" and `message`
 *   explains the problem (matches the exported SqlQuery shape).
 */
export const sqlQuerySelector = (name) => {
  try {
    const queryFile = readFileSync(
      new URL(`../querys/newQueries/${name}.sql`, import.meta.url),
      "utf8",
    );
    return {
      success: true,
      message: `Query for: ${name}`,
      query: queryFile,
    };
  } catch (error) {
    // Always include `query` so the result matches the SqlQuery type and
    // callers can rely on it being a string even when the read fails.
    return {
      success: false,
      query: "",
      message:
        "Error getting the query file, please make sure you have the correct name.",
    };
  }
};

View File

@@ -10,7 +10,8 @@
"dev:front": "cd frontend && npm run dev", "dev:front": "cd frontend && npm run dev",
"dev:db:migrate": "npx drizzle-kit push", "dev:db:migrate": "npx drizzle-kit push",
"dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts", "dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts",
"dev": "concurrently -n \"server,frontend,docs,oldServer\" -c \"#007755,#2f6da3,#DB4FE0, #1F73D1\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\" \"npm run dev:old\"", "dev": "concurrently -n \"server,oldServer\" -c \"#007755, #1F73D1\" \"npm run dev:app\" \"npm run dev:old\"",
"dev:all": "concurrently -n \"server,frontend,docs,oldServer\" -c \"#007755,#2f6da3,#DB4FE0, #1F73D1\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\" \"npm run dev:old\"",
"dev:old": "cd lstV2 && npm run dev", "dev:old": "cd lstV2 && npm run dev",
"copy:docs": "node scripts/lstDocCopy.mjs", "copy:docs": "node scripts/lstDocCopy.mjs",
"build:app": "rimraf dist && npx tsc && node scripts/lstAppMoves.mjs", "build:app": "rimraf dist && npx tsc && node scripts/lstAppMoves.mjs",

View File

@@ -0,0 +1,112 @@
import net from "net";
/**
 * Fake-scanner test harness: mimics the TCP scanning process against the
 * label server to book in the pallets listed below (see the connect callback).
 */
const scannerID = "98@"; // scanner identity prefixed to every frame
const scannerCommand = "Alplaprodcmd10"; // command sent first to consume all the pallets
// NOTE(review): unlike the "consume to lot" sibling script, this variant never
// sends a lot trigger, so the previously unused `lot` constant was removed.
const labels = [
  "1000000000000000000000000000000005512460",
  "1000000000000000000000000000000005512470",
  "1000000000000000000000000000000005512480",
  "1000000000000000000000000000000005512490",
  "1000000000000000000000000000000005512500",
  "1000000000000000000000000000000005512510",
  "1000000000000000000000000000000005512520",
  "1000000000000000000000000000000005512530",
  "1000000000000000000000000000000005512540",
  "1000000000000000000000000000000005512550",
  "1000000000000000000000000000000005512560",
  "1000000000000000000000000000000005512570",
  "1000000000000000000000000000000005512580",
  "1000000000000000000000000000000005512590",
  "1000000000000000000000000000000005512600",
  "1000000000000000000000000000000005512610",
  "1000000000000000000000000000000005512620",
  "1000000000000000000000000000000005512630",
  "1000000000000000000000000000000005512640",
  "1000000000000000000000000000000005512650",
  "1000000000000000000000000000000005512660",
  "1000000000000000000000000000000005512670",
  "1000000000000000000000000000000005512680",
  "1000000000000000000000000000000005512690",
  "1000000000000000000000000000000005512700",
  "1000000000000000000000000000000005512710",
  "1000000000000000000000000000000005512720",
  "1000000000000000000000000000000005512730",
  "1000000000000000000000000000000005512740",
  "1000000000000000000000000000000005512750",
  "1000000000000000000000000000000005512760",
  "1000000000000000000000000000000005512770",
  "1000000000000000000000000000000005512780",
  "1000000000000000000000000000000005512790",
  "1000000000000000000000000000000005512800",
  "1000000000000000000000000000000005512810",
  "1000000000000000000000000000000005512820",
  "1000000000000000000000000000000005512830",
  "1000000000000000000000000000000005512840",
  "1000000000000000000000000000000005512850",
  "1000000000000000000000000000000005512860",
  "1000000000000000000000000000000005512870",
  "1000000000000000000000000000000005512880",
  "1000000000000000000000000000000005512890",
  "1000000000000000000000000000000005512900",
  "1000000000000000000000000000000005512910",
  "1000000000000000000000000000000005512920",
  "1000000000000000000000000000000005512930",
  "1000000000000000000000000000000005512940",
  "1000000000000000000000000000000005512950",
  "1000000000000000000000000000000005512960",
];
// STX/ETX frame each ASCII payload per the scanner wire protocol.
const STX = "\x02";
const ETX = "\x03";
const scanner = new net.Socket();
// Drive the fake booking session: switch the scanner mode, then book in every
// pallet label with a short pause between frames so the server keeps up.
scanner.connect(50000, "10.204.0.26", async () => {
  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  const send = (payload) => {
    const buf = Buffer.from(`${STX}${scannerID}${payload}${ETX}`, "ascii");
    console.log("Sending:", buf.toString("ascii"));
    scanner.write(buf);
  };
  console.log("Connected to scanner");
  // switch the scanner mode first
  send(scannerCommand);
  await sleep(2000);
  // extra settle time kept from the original sequence
  await sleep(2000);
  // book in all the pallets in the array
  for (const label of labels) {
    send(label);
    await sleep(1200);
  }
  await sleep(1500);
  scanner.destroy();
});
// Print each scanner response after stripping wire noise (NUL bytes and ANSI
// escape sequences) so the console output stays readable. The handler is
// synchronous — the original `async` served no purpose (no await inside).
scanner.on("data", (data) => {
  const text = data
    .toString("ascii")
    .replace(/\x00/g, "") // remove null bytes
    .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
    .trim();
  console.log("Response:", text);
});
// Lifecycle logging so a dropped or refused connection is visible immediately.
scanner.on("close", () => {
  console.log("Connection closed");
});
scanner.on("error", (err) => {
  console.error("Scanner error:", err);
});

View File

@@ -0,0 +1,100 @@
import net from "net";
/**
 * Fake-scanner test harness: connects to the label server over TCP and mimics
 * a handheld scanner to consume the pallets listed below against a lot.
 */
const scannerID = "98@"; // scanner identity prefixed to every frame
const scannerCommand = "Alplaprodcmd112"; // command sent first to consume all the pallets
const lot = "AlplaPRODchg#11601"; // lot to consume to; make sure it is showing in 2.0 so it can be consumed to
const labels = [
  "1000000000000000000000000000000004551860",
  "1000000000000000000000000000000004551640",
  "1000000000000000000000000000000004551840",
  "1000000000000000000000000000000004551610",
  "1000000000000000000000000000000004551720",
  "1000000000000000000000000000000004551680",
  "1000000000000000000000000000000004551740",
  "1000000000000000000000000000000004551660",
  "1000000000000000000000000000000004551570",
  "1000000000000000000000000000000004551480",
  "1000000000000000000000000000000004551510",
  "1000000000000000000000000000000004551460",
  "1000000000000000000000000000000004551600",
  "1000000000000000000000000000000004551340",
  "1000000000000000000000000000000004551580",
  "1000000000000000000000000000000004551330",
  "1000000000000000000000000000000004551290",
  "1000000000000000000000000000000004551180",
  "1000000000000000000000000000000004551260",
  "1000000000000000000000000000000004551150",
  "1000000000000000000000000000000004551390",
  "1000000000000000000000000000000004551440",
  "1000000000000000000000000000000004551360",
  "1000000000000000000000000000000004551400",
  "1000000000000000000000000000000004544780",
  "1000000000000000000000000000000004551230",
  "1000000000000000000000000000000004544770",
  "1000000000000000000000000000000004551200",
  "1000000000000000000000000000000004544850",
  "1000000000000000000000000000000004548370",
  "1000000000000000000000000000000004544840",
  "1000000000000000000000000000000004548470",
  "1000000000000000000000000000000004611380",
  "1000000000000000000000000000000004611470",
  "1000000000000000000000000000000004611440",
];
// STX/ETX frame each ASCII payload per the scanner wire protocol.
const STX = "\x02";
const ETX = "\x03";
const scanner = new net.Socket();
// Drive the fake consume session: put the scanner in consume mode, then for
// each pallet send the lot trigger immediately followed by the pallet label.
// Fixes the inner `let message` that shadowed the outer one and removes the
// duplicated frame-building code; the wire sequence is unchanged.
scanner.connect(50001, "10.80.0.26", async () => {
  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  const send = (payload) => {
    const buf = Buffer.from(`${STX}${scannerID}${payload}${ETX}`, "ascii");
    console.log("Sending:", buf.toString("ascii"));
    scanner.write(buf);
  };
  console.log("Connected to scanner");
  // switch the scanner into consume mode
  send(scannerCommand);
  await sleep(2000);
  // extra settle time kept from the original sequence
  await sleep(2000);
  // consume all the pallets in the array
  for (const label of labels) {
    // trigger the lot first so the following label books against it
    send(lot);
    send(label);
    await sleep(1200);
  }
  await sleep(1500);
  scanner.destroy();
});
// Print each scanner response after stripping wire noise (NUL bytes and ANSI
// escape sequences) so the console output stays readable. The handler is
// synchronous — the original `async` served no purpose (no await inside).
scanner.on("data", (data) => {
  const text = data
    .toString("ascii")
    .replace(/\x00/g, "") // remove null bytes
    .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
    .trim();
  console.log("Response:", text);
});
// Lifecycle logging so a dropped or refused connection is visible immediately.
scanner.on("close", () => {
  console.log("Connection closed");
});
scanner.on("error", (err) => {
  console.error("Scanner error:", err);
});

View File

@@ -0,0 +1,187 @@
import net from "net";
/**
 * Fake-scanner test harness: mimics the scanning process against the label
 * server over TCP to replay a picksheet scan and create the BOL.
 */
const prodIP = "10.204.0.26"; // production scanner-server host
const prodPort = 50000; // production scanner-server TCP port
const scannerID = "98@"; // scanner identity prefixed to every frame
const scannerCommand = "AlplaPRODcmd00000042#000028643"; // top of the picksheet
// NOTE(review): scannerCommand2 is empty and only referenced from the disabled
// close-out step in the connect callback — confirm before relying on it.
const scannerCommand2 = ""; // bottom of the pick sheet
const labels = [
  "1000000000000000000000000000000005572620",
  "1000000000000000000000000000000005572630",
  "1000000000000000000000000000000005572640",
  "1000000000000000000000000000000005572650",
  "1000000000000000000000000000000005572660",
  "1000000000000000000000000000000005572670",
  "1000000000000000000000000000000005572680",
  "1000000000000000000000000000000005572690",
  "1000000000000000000000000000000005572700",
  "1000000000000000000000000000000005572710",
  "1000000000000000000000000000000005572720",
  "1000000000000000000000000000000005572730",
  "1000000000000000000000000000000005572740",
  "1000000000000000000000000000000005572750",
  "1000000000000000000000000000000005572760",
  "1000000000000000000000000000000005572770",
  "1000000000000000000000000000000005572780",
  "1000000000000000000000000000000005572790",
  "1000000000000000000000000000000005572800",
  "1000000000000000000000000000000005572810",
  "1000000000000000000000000000000005572820",
  "1000000000000000000000000000000005572830",
  "1000000000000000000000000000000005572840",
  "1000000000000000000000000000000005572850",
  "1000000000000000000000000000000005572860",
  "1000000000000000000000000000000005572870",
  "1000000000000000000000000000000005572880",
  "1000000000000000000000000000000005572890",
  "1000000000000000000000000000000005572900",
  "1000000000000000000000000000000005572910",
  "1000000000000000000000000000000005573226",
  "1000000000000000000000000000000005573236",
  "1000000000000000000000000000000005573246",
  "1000000000000000000000000000000005573256",
  "1000000000000000000000000000000005573266",
  "1000000000000000000000000000000005573276",
  "1000000000000000000000000000000005573286",
  "1000000000000000000000000000000005573296",
  "1000000000000000000000000000000005573306",
  "1000000000000000000000000000000005573316",
  "1000000000000000000000000000000005573326",
  "1000000000000000000000000000000005573336",
  "1000000000000000000000000000000005573346",
  "1000000000000000000000000000000005573356",
  "1000000000000000000000000000000005573366",
  "1000000000000000000000000000000005573376",
  "1000000000000000000000000000000005573386",
  "1000000000000000000000000000000005573396",
  "1000000000000000000000000000000005573406",
  "1000000000000000000000000000000005573416",
  "1000000000000000000000000000000005573426",
  "1000000000000000000000000000000005573436",
  "1000000000000000000000000000000005573446",
  "1000000000000000000000000000000005573456",
  "1000000000000000000000000000000005573466",
  "1000000000000000000000000000000005573476",
  "1000000000000000000000000000000005573486",
  "1000000000000000000000000000000005573496",
  "1000000000000000000000000000000005573506",
  "1000000000000000000000000000000005573516",
  "1000000000000000000000000000000005581616",
  "1000000000000000000000000000000005581626",
  "1000000000000000000000000000000005581636",
  "1000000000000000000000000000000005581646",
  "1000000000000000000000000000000005581656",
  "1000000000000000000000000000000005581666",
  "1000000000000000000000000000000005581676",
  "1000000000000000000000000000000005581686",
  "1000000000000000000000000000000005581696",
  "1000000000000000000000000000000005581706",
  "1000000000000000000000000000000005581716",
  "1000000000000000000000000000000005581726",
  "1000000000000000000000000000000005581736",
  "1000000000000000000000000000000005581746",
  "1000000000000000000000000000000005581756",
  "1000000000000000000000000000000005581766",
  "1000000000000000000000000000000005581776",
  "1000000000000000000000000000000005581786",
  "1000000000000000000000000000000005581796",
  "1000000000000000000000000000000005581806",
  "1000000000000000000000000000000005581816",
  "1000000000000000000000000000000005581826",
  "1000000000000000000000000000000005581836",
  "1000000000000000000000000000000005581846",
  "1000000000000000000000000000000005581856",
  "1000000000000000000000000000000005582760",
  "1000000000000000000000000000000005581866",
  "1000000000000000000000000000000005581876",
  "1000000000000000000000000000000005581886",
  "1000000000000000000000000000000005581896",
  "1000000000000000000000000000000005581906",
  "1000000000000000000000000000000005581310",
  "1000000000000000000000000000000005581320",
  "1000000000000000000000000000000005581330",
  "1000000000000000000000000000000005581340",
  "1000000000000000000000000000000005581350",
  "1000000000000000000000000000000005581360",
  "1000000000000000000000000000000005581370",
  "1000000000000000000000000000000005581380",
  "1000000000000000000000000000000005581390",
  "1000000000000000000000000000000005581400",
  "1000000000000000000000000000000005581410",
  "1000000000000000000000000000000005581420",
  "1000000000000000000000000000000005581430",
  "1000000000000000000000000000000005581440",
  "1000000000000000000000000000000005581450",
  "1000000000000000000000000000000005581460",
  "1000000000000000000000000000000005581470",
  "1000000000000000000000000000000005581480",
  "1000000000000000000000000000000005581490",
  "1000000000000000000000000000000005581500",
  "1000000000000000000000000000000005581510",
  "1000000000000000000000000000000005581520",
  "1000000000000000000000000000000005581530",
  "1000000000000000000000000000000005581540",
  "1000000000000000000000000000000005581550",
  "1000000000000000000000000000000005581560",
  "1000000000000000000000000000000005581570",
  "1000000000000000000000000000000005581580",
  "1000000000000000000000000000000005581590",
  "1000000000000000000000000000000005581600",
];
// STX/ETX frame each ASCII payload per the scanner wire protocol.
const STX = "\x02";
const ETX = "\x03";
const scanner = new net.Socket();
// Replay a picksheet scan: send the picksheet-open command, then every pallet
// label with a short pause between frames.
scanner.connect(prodPort, prodIP, async () => {
  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  const send = (payload) => {
    const buf = Buffer.from(`${STX}${scannerID}${payload}${ETX}`, "ascii");
    console.log("Sending:", buf.toString("ascii"));
    scanner.write(buf);
  };
  console.log("Connected to scanner");
  send(scannerCommand);
  await sleep(2000);
  for (const label of labels) {
    send(label);
    await sleep(1200);
  }
  // Close-out step is intentionally disabled: to close the incoming picksheet,
  // wait ~1500ms and send(scannerCommand2) here.
  await sleep(1500);
  scanner.destroy();
});
// Print each scanner response after stripping wire noise (NUL bytes and ANSI
// escape sequences) so the console output stays readable. The handler is
// synchronous — the original `async` served no purpose (no await inside).
scanner.on("data", (data) => {
  const text = data
    .toString("ascii")
    .replace(/\x00/g, "") // remove null bytes
    .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
    .trim();
  console.log("Response:", text);
});
// Lifecycle logging so a dropped or refused connection is visible immediately.
scanner.on("close", () => {
  console.log("Connection closed");
});
scanner.on("error", (err) => {
  console.error("Scanner error:", err);
});