Compare commits

..

30 Commits

Author SHA1 Message Date
dcfa56bdb9 fix(notify): fixed to plantto plant that would cause multiple emails to be sent and never update 2026-03-11 15:30:53 -05:00
ea92422bb1 feat(notification): plant to plant edi 2026-03-10 08:18:29 -05:00
2111a5fdc9 refactor(sql): changes to improve the job disable 2026-02-18 08:53:19 -06:00
6edd20585f refactor(stats): added in mastermacro version 2026-02-18 08:52:29 -06:00
a9759795c4 fix(scripts): changed the fake scanning to be more readable 2026-02-16 19:15:32 -06:00
32f26a1725 fix(quality): hoping to finally find the bud that kills me everynight 2026-02-16 19:05:41 -06:00
60533beed5 feat(sql): job disabling scripts 2026-02-16 19:05:08 -06:00
24ced97b6d feat(notification): added cycle count check 2026-02-16 19:04:38 -06:00
dc1d342799 fix(scanner): if host or port not sent over stop the connection right away 2026-02-16 19:04:04 -06:00
44d0cb63cf refactor(sql): moved new queries to there own folder to make it more easy to work and migrate 2026-02-16 19:01:23 -06:00
ace73fa919 refactor(sendmail): updated the smtp per alpla needs 2026-02-16 18:59:12 -06:00
316af4233f refactor(stats): added sheet version check in 2026-02-16 18:58:17 -06:00
36a805c652 refactor(scripts): create finance bol 2026-02-16 09:40:19 -06:00
460bc3d24a feat(query selector): queryselector from file based vs cp to ts filesz 2026-02-16 09:40:00 -06:00
ec201fcfb5 refactor(sql): full changes to localhost if on produciton server 2026-02-16 09:39:35 -06:00
914ad46c43 refactor(sql server): changes to look at localhost if in production 2026-02-16 09:38:55 -06:00
b96c546ed3 refactor(notify): changed to only allow max 100 errors in the email 2026-02-16 09:38:31 -06:00
29b3be41a1 build(notification): fixed fifo index ts errors 2026-02-16 09:38:10 -06:00
16edf58025 refactor(eom): changes to hist inv 2026-02-16 09:37:40 -06:00
775627f215 feat(scanner): tcp scanner connection based on env var no more db stuff 2026-02-16 09:37:14 -06:00
4e70fae69b refactor(api docs): added/changed docs 2026-02-16 09:36:44 -06:00
24dd109a21 fix(commandlog): changes to the log table 2026-02-13 16:08:23 -06:00
38b57a00cc refactor(datamart): article changes to add pet-g 2026-02-13 16:03:26 -06:00
f8070db95f fix(sqlserver): changed to proper pool connection 2026-02-13 16:02:43 -06:00
10e9dc430c fix(notification): limited to 1000 max errors 2026-02-13 15:59:38 -06:00
6b669ccd9c fix(labelinfo): corrected the query on label info for external 2026-02-13 14:51:15 -06:00
d9a10d98a1 refactor(sendmail): change the send mail function from noreply to donotreply 2026-02-13 14:50:44 -06:00
e64dc7c013 refactor(ocp): removed zechetti 2 from this silly thing for now 2026-02-13 14:50:07 -06:00
d63138d746 helper scripts 2026-02-03 15:40:51 -06:00
84a28f2d01 added relocate 2026-02-03 15:40:41 -06:00
51 changed files with 2332 additions and 544 deletions

View File

@@ -1,6 +1,6 @@
{ {
"version": "1", "version": "1",
"name": "LogisticsSupportTool_API_DOCS", "name": "lstv2",
"type": "collection", "type": "collection",
"ignore": [ "ignore": [
"node_modules", "node_modules",

View File

@@ -1,5 +1,5 @@
vars { vars {
url: http://localhost:4200 url: http://localhost:5500
session_cookie: session_cookie:
urlv2: http://usbow1vms006:3000 urlv2: http://usbow1vms006:3000
jwtV2: jwtV2:

View File

@@ -0,0 +1,24 @@
meta {
name: bookout
type: http
seq: 2
}
post {
url: {{url}}/lst/old/api/logistics/bookout
body: json
auth: none
}
body:json {
{
"runningNr": "1865027",
"reason": "packer printed premature"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: logistics
seq: 7
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,24 @@
meta {
name: relocate
type: http
seq: 1
}
post {
url: {{url}}/lst/old/api/logistics/relocate
body: json
auth: inherit
}
body:json {
{
"runningNr": "56121541",
"laneID": "30006"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,24 @@
meta {
name: removeAsWaste
type: http
seq: 3
}
post {
url: {{url}}/lst/old/api/logistics/removeasreusable
body: json
auth: none
}
body:json {
{
"runningNr": "1865018",
"reason": "validating stockout"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -58,6 +58,8 @@ router.get("/", async (req, res) => {
memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${( memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
used.rss / 1024 / 1024 used.rss / 1024 / 1024
).toFixed(2)} MB`, ).toFixed(2)} MB`,
eomFGPkgSheetVersion: 1, // this is the excel file version when we have a change to the macro we want to grab this
masterMacroFile: 1, // this is the excel file version when we have a change to the macro we want to grab this
}); });
}); });

View File

@@ -1,13 +1,13 @@
import type { Address } from "nodemailer/lib/mailer/index.js";
import type { Transporter } from "nodemailer"; import type { Transporter } from "nodemailer";
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
import type Mail from "nodemailer/lib/mailer/index.js";
import os from "os";
import nodemailer from "nodemailer"; import nodemailer from "nodemailer";
import type Mail from "nodemailer/lib/mailer/index.js";
import type { Address } from "nodemailer/lib/mailer/index.js";
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
import hbs from "nodemailer-express-handlebars";
import os from "os";
import path from "path"; import path from "path";
import { fileURLToPath } from "url"; import { fileURLToPath } from "url";
import { promisify } from "util"; import { promisify } from "util";
import hbs from "nodemailer-express-handlebars";
import { createLogger } from "../../logger/logger.js"; import { createLogger } from "../../logger/logger.js";
interface HandlebarsMailOptions extends Mail.Options { interface HandlebarsMailOptions extends Mail.Options {
@@ -27,57 +27,59 @@ export const sendEmail = async (data: EmailData): Promise<any> => {
let transporter: Transporter; let transporter: Transporter;
let fromEmail: string | Address; let fromEmail: string | Address;
if ( // if (
os.hostname().includes("OLP") && // os.hostname().includes("OLP") &&
process.env.EMAIL_USER && // process.env.EMAIL_USER &&
process.env.EMAIL_PASSWORD // process.env.EMAIL_PASSWORD
) { // ) {
transporter = nodemailer.createTransport({ // transporter = nodemailer.createTransport({
service: "gmail", // service: "gmail",
auth: { // auth: {
user: process.env.EMAIL_USER, // user: process.env.EMAIL_USER,
pass: process.env.EMAIL_PASSWORD, // pass: process.env.EMAIL_PASSWORD,
}, // },
//debug: true, // //debug: true,
}); // });
// update the from email // // update the from email
fromEmail = process.env.EMAIL_USER; // fromEmail = process.env.EMAIL_USER;
} else { // } else {
// convert to the correct plant token. // // convert to the correct plant token.
let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`; //let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`;
//const testServers = ["vms036", "VMS036"]; //const testServers = ["vms036", "VMS036"];
if (os.hostname().includes("VMS036")) { // if (os.hostname().includes("VMS036")) {
host = "USMCD1-smtp.alpla.net"; // host = "USMCD1-smtp.alpla.net";
} // }
// if (plantToken[0].value === "usiow2") { // if (plantToken[0].value === "usiow2") {
// host = "USIOW1-smtp.alpla.net"; // host = "USIOW1-smtp.alpla.net";
// } // }
transporter = nodemailer.createTransport({ transporter = nodemailer.createTransport({
host: host, host: "smtp.azurecomm.net",
port: 25, port: 587,
rejectUnauthorized: false, //rejectUnauthorized: false,
//secure: false, tls: {
// auth: { minVersion: "TLSv1.2",
// user: "alplaprod", },
// pass: "obelix", auth: {
// }, user: "donotreply@mail.alpla.com",
pass: process.env.SMTP_PASSWORD,
},
debug: true, debug: true,
} as SMTPTransport.Options); } as SMTPTransport.Options);
// update the from email // update the from email
fromEmail = `noreply@alpla.com`; fromEmail = `DoNotReply@mail.alpla.com`;
} //}
// creating the handlbar options // creating the handlbar options
const viewPath = path.resolve( const viewPath = path.resolve(
path.dirname(fileURLToPath(import.meta.url)), path.dirname(fileURLToPath(import.meta.url)),
"./views/" "./views/",
); );
const handlebarOptions = { const handlebarOptions = {
@@ -116,7 +118,7 @@ export const sendEmail = async (data: EmailData): Promise<any> => {
log.error( log.error(
{ error: err }, { error: err },
`Error sending Email to : ${data.email}` `Error sending Email to : ${data.email}`,
); );
return { success: false, message: "Error sending email.", error: err }; return { success: false, message: "Error sending email.", error: err };
} }

View File

@@ -12,14 +12,15 @@ import { LstCard } from "../../../extendedUi/LstCard";
export default function Relocate() { export default function Relocate() {
const [bookingIn, setBookingIn] = useState(false); const [bookingIn, setBookingIn] = useState(false);
const form = useForm({ const form = useForm({
defaultValues: { runningNr: " ", lane: "" }, defaultValues: { runningNr: " ", laneID: "" },
onSubmit: async ({ value }) => { onSubmit: async ({ value }) => {
// Do something with form data // Do something with form data
setBookingIn(true); setBookingIn(true);
try { try {
const res = await axios.post("/lst/old/api/ocp/bookin", { const res = await axios.post("/lst/old/api/logistics/relocate", {
runningNr: parseInt(value.runningNr), runningNr: parseInt(value.runningNr),
laneID: parseInt(value.laneID),
}); });
if (res.data.success) { if (res.data.success) {
@@ -27,15 +28,15 @@ export default function Relocate() {
form.reset(); form.reset();
setBookingIn(false); setBookingIn(false);
} else { } else {
console.log(res.data.data.errors); console.log(res.data.message);
toast.error(res.data.data.errors[0]?.message); toast.error(res.data.message);
form.reset(); //form.reset();
setBookingIn(false); setBookingIn(false);
} }
} catch (error) { } catch (error) {
console.log(error); console.log(error);
toast.error( toast.error(
"There was an error booking in pallet please validate you entered the correct info and try again.", "There was an error relocating the pallet please validate the data.",
); );
setBookingIn(false); setBookingIn(false);
} }
@@ -58,7 +59,7 @@ export default function Relocate() {
validators={{ validators={{
// We can choose between form-wide and field-specific validators // We can choose between form-wide and field-specific validators
onChange: ({ value }) => onChange: ({ value }) =>
value.length > 2 value.length > 0
? undefined ? undefined
: "Please enter a valid running number", : "Please enter a valid running number",
}} }}
@@ -83,19 +84,17 @@ export default function Relocate() {
}} }}
/> />
<form.Field <form.Field
name="lane" name="laneID"
validators={{ validators={{
// We can choose between form-wide and field-specific validators // We can choose between form-wide and field-specific validators
onChange: ({ value }) => onChange: ({ value }) =>
value.length > 2 value.length > 0 ? undefined : "Please enter a valid lane ID",
? undefined
: "Please enter a valid running number",
}} }}
children={(field) => { children={(field) => {
return ( return (
<div className=""> <div className="">
<Label htmlFor="runningNr" className="mb-2"> <Label htmlFor="laneID" className="mb-2">
Enter lane Enter lane ID
</Label> </Label>
<Input <Input
name={field.name} name={field.name}

View File

@@ -7,14 +7,18 @@ export default function HelperPage() {
return ( return (
<div className="flex flex-wrap m-2 justify-center"> <div className="flex flex-wrap m-2 justify-center">
<div className="m-1"> <div className="m-1">
<div className="m-1 ">
<Bookin /> <Bookin />
</div> </div>
<div className="w-96 m-1">
<Relocate />
</div>
</div>
<div className="m-1"> <div className="m-1">
{url === "localhost" && ( {url === "localhost" && (
<div className="m-1"> <div className="m-1">
<RemoveAsNonReusable /> <RemoveAsNonReusable />
<Relocate />
</div> </div>
)} )}
</div> </div>

View File

@@ -1,4 +1,4 @@
import { text, pgTable, timestamp, uuid, jsonb } from "drizzle-orm/pg-core"; import { jsonb, pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod"; import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import { z } from "zod"; import { z } from "zod";
@@ -9,12 +9,12 @@ export const commandLog = pgTable(
commandUsed: text("commandUsed").notNull(), commandUsed: text("commandUsed").notNull(),
bodySent: jsonb("bodySent").default([]), bodySent: jsonb("bodySent").default([]),
reasonUsed: text("reasonUsed"), reasonUsed: text("reasonUsed"),
add_at: timestamp("add_Date").defaultNow(), addDate: timestamp("add_Date").defaultNow(),
}, },
(table) => [ (table) => [
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`), // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
// uniqueIndex("role_name").on(table.name), // uniqueIndex("role_name").on(table.name),
] ],
); );
// Schema for inserting a user - can be used to validate API requests // Schema for inserting a user - can be used to validate API requests

View File

@@ -10,7 +10,8 @@
"dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts", "dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts",
"dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts", "dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts",
"build": "npm run build:server", "build": "npm run build:server",
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y ", "build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y && npm run build:copySql",
"build:copySql": "xcopy server\\services\\sqlServer\\querys\\newQueries dist\\server\\services\\sqlServer\\querys\\newQueries\\ /E /I /Y ",
"build:frontend": "cd frontend && npm run build", "build:frontend": "cd frontend && npm run build",
"build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild", "build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild",
"copy:scripts": "tsx server/scripts/copyScripts.ts", "copy:scripts": "tsx server/scripts/copyScripts.ts",

View File

@@ -0,0 +1,191 @@
/**
 * TCP client for the warehouse barcode scanner.
 *
 * Commands are framed as STX + scannerId + command + ETX (ASCII) and are
 * processed strictly one at a time (FIFO): the next queued command is only
 * written to the socket once the previous one has received a response or
 * timed out.
 */
import net from "net";

// A command waiting in the FIFO queue together with its promise handles.
interface QueuedCommand {
  command: string;
  resolve: (value: string) => void;
  reject: (reason?: any) => void;
  timeout: NodeJS.Timeout;
}

// ASCII framing bytes expected by the scanner protocol.
const STX = "\x02"; // start of text
const ETX = "\x03"; // end of text

// How long we wait for the scanner to answer a single command.
const RESPONSE_TIMEOUT_MS = 5000;

export class ScannerClient {
  private socket = new net.Socket();
  private connected = false;
  private queue: QueuedCommand[] = [];
  private processing = false;

  constructor(
    private host: string,
    private port: number,
    private scannerId: string,
  ) {
    this.initialize();
  }

  /** Connect to the scanner and wire up the socket event handlers. */
  private initialize() {
    // If host/port were never configured (e.g. env vars missing) we stay
    // disconnected; scan() will reject immediately instead of hanging.
    if (!this.host || !this.port) {
      console.log("Host or port is missing");
      return;
    }
    this.socket.connect(this.port, this.host, () => {
      console.info("Connected to scanner");
      this.connected = true;
    });
    this.socket.on("data", (data) => this.handleData(data));
    this.socket.on("close", () => {
      console.log("Scanner connection closed");
      this.connected = false;
      // Fail any commands still waiting so callers are not left hanging
      // until their individual 5 s timeouts fire.
      this.flushQueue("Scanner connection closed");
    });
    this.socket.on("error", (err) => {
      console.error("Scanner error:", err);
    });
  }

  /**
   * Queue a command for the scanner and resolve with the raw ASCII response.
   * Rejects (with a string reason, as existing callers expect) when the
   * scanner is disconnected or does not answer within RESPONSE_TIMEOUT_MS.
   */
  public scan(command: string): Promise<string> {
    if (!this.connected) {
      return Promise.reject("Scanner not connected");
    }
    return new Promise((resolve, reject) => {
      const timeout = setTimeout(() => {
        // BUGFIX: remove the timed-out command from the queue. Previously it
        // stayed at the head, so processQueue() re-sent the dead command and
        // its late response was delivered to the wrong caller.
        const idx = this.queue.findIndex((q) => q.timeout === timeout);
        if (idx !== -1) this.queue.splice(idx, 1);
        this.processing = false;
        reject("Scanner timeout");
        this.processQueue();
      }, RESPONSE_TIMEOUT_MS);
      this.queue.push({
        command,
        resolve,
        reject,
        timeout,
      });
      this.processQueue();
    });
  }

  /** Send the head of the queue when nothing is in flight (strict FIFO). */
  private processQueue() {
    if (this.processing || this.queue.length === 0) return;
    this.processing = true;
    const current = this.queue[0];
    const message = Buffer.from(
      `${STX}${this.scannerId}${current.command}${ETX}`,
      "ascii",
    );
    this.socket.write(message);
  }

  /** Resolve the in-flight command with the scanner's response frame. */
  private handleData(data: Buffer) {
    console.log(
      "ASCII:",
      data
        .toString("ascii")
        .replace(/\x00/g, "") // remove null bytes
        .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
        .trim(),
    );
    const current = this.queue.shift();
    if (current) {
      clearTimeout(current.timeout);
      current.resolve(data.toString("ascii"));
    }
    this.processing = false;
    this.processQueue();
  }

  /** Reject and clear every queued command with the given reason. */
  private flushQueue(reason: string) {
    for (const pending of this.queue.splice(0)) {
      clearTimeout(pending.timeout);
      pending.reject(reason);
    }
    this.processing = false;
  }
}

// Singleton used by the rest of the server, configured from env vars.
// NOTE(review): if SERVER_IP/SCANNER_PORT are unset, initialize() bails out
// and every scan() call rejects with "Scanner not connected".
export const scanner = new ScannerClient(
  process.env.SERVER_IP!,
  parseInt(process.env.SCANNER_PORT!, 10),
  `${process.env.SCANNER_ID}@`,
);
// export const connectToScanner = () => {
// if (!process.env.SERVER_IP || !process.env.SCANNER_PORT) {
// return {
// success: false,
// message: "Missing ServerIP or ServerPort",
// };
// }
// scanner.connect(prodPort, prodIP, () => {
// console.log("Connected to scanner");
// connected = true;
// });
// };
// export const scan = async (command: string) => {
// if (!connected) {
// return {
// success: false,
// message: "Scanner is not connected, please contact admin",
// };
// }
// if (inScanCommand) {
// bufferCommands.push({ timeStamp: new Date(Date.now()), command: command });
// }
// // we are going to set to scanning
// inScanCommand = true;
// const message = Buffer.from(`${STX}${scannerID}${command}${ETX}`, "ascii");
// scanner.write(message);
// await new Promise((resolve) => setTimeout(resolve, 750));
// inScanCommand = false;
// if (bufferCommands.length > 0) {
// await scan(bufferCommands[0].command);
// bufferCommands.shift();
// }
// return {
// success: true,
// message: "Scan completed",
// };
// };
// scanner.on("data", async (data) => {
// console.log(
// "Response:",
// data
// .toString("ascii")
// .replace(/\x00/g, "") // remove null bytes
// .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
// .trim(),
// );
// });
// scanner.on("close", () => {
// console.log("Connection closed");
// });
// scanner.on("error", (err) => {
// console.error("Scanner error:", err);
// });

View File

@@ -51,6 +51,7 @@ export const psiGetPlanningData = async (
}; };
} }
// TODO: if we are not running planning we no pass the old structure if we are running new planning use the below improved version that makes sure we dont have negative numebrs.
articles = data.data; articles = data.data;
return { return {

View File

@@ -145,7 +145,7 @@ app.openapi(
return c.json({ return c.json({
success: true, success: true,
message: "All Current Active Querys.", message: "All Current Active Querys.",
sheetVersion: 2.8, sheetVersion: 2.8, // TODO: when this gets switched change this
data: current, data: current,
}); });
}, },

View File

@@ -63,10 +63,10 @@ setTimeout(async () => {
// the time we want to run the hostircal data should be the same time the historical data run on the server // the time we want to run the hostircal data should be the same time the historical data run on the server
// getting this from the shift time // getting this from the shift time
if (process.env.NODE_ENV?.trim() !== "production") { //if (process.env.NODE_ENV?.trim() !== "production") {
setTimeout(() => { setTimeout(() => {
historicalInvIMmport(); historicalInvIMmport();
}, 15 * 1000); }, 15 * 1000);
} //}
export default app; export default app;

View File

@@ -0,0 +1,155 @@
import axios from "axios";
import net from "net";
import { db } from "../../../../../database/dbclient.js";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { scanner } from "../../../../globalUtils/scannerConnect.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js";
type Data = {
  runningNr: number;
  reason: string;
  user: string;
};

/**
 * Book a pallet out of production via the AlplaPROD BookOut endpoint
 * (/public/v1.1/Manufacturing/ProductionControlling/BookOut, body
 * { sscc, scannerId }).
 *
 * Flow:
 *   1. Validate the free-text reason (minimum 4 characters, for the audit log).
 *   2. Look the label up in SQL Server; it must currently be in PPOO.
 *   3. A blocked label may only proceed when blocked for COA; in that case the
 *      scanner is driven (cmd89 then the barcode) to release it first.
 *   4. POST the SSCC to BookOut and record the command (best effort).
 *
 * Returns a { success, status, message, data? } result object; never throws.
 */
export const bookOutPallet = async (data: Data) => {
  const { runningNr, reason } = data;
  if (!reason || reason.length < 4) {
    return {
      success: false,
      status: 400,
      // fixed user-facing typo: "to short" -> "too short"
      message: "The reason provided is too short",
      data: [],
    };
  }
  // Resolve the SQL text for the inventory-info query from the query store.
  const queryCheck = sqlQuerySelector("inventoryInfo.query");
  if (!queryCheck.success) {
    return {
      success: false,
      status: 400,
      message: queryCheck.message,
      data: data,
    };
  }
  const { data: label, error: labelError } = (await tryCatch(
    query(
      queryCheck.query!.replace("[runningNr]", `${runningNr}`),
      "labelQuery",
    ),
  )) as any;
  if (labelError) {
    return {
      success: false,
      status: 400,
      message: labelError.message,
      data: labelError,
    };
  }
  // The pallet must currently be in PPOO before it can be booked out.
  if (label.data.length <= 0) {
    return {
      success: false,
      status: 400,
      message: `${runningNr} is not currently in ppoo, please move to ppoo before trying to book-out`,
      data: [],
    };
  }
  // A blocked label is only allowed through when the block is for COA;
  // anything else must be released by quality first.
  if (
    label.data[0].blockingReason &&
    !label.data[0].blockingReason?.includes("COA")
  ) {
    return {
      success: false,
      status: 400,
      message: `${runningNr} is not currently blocked for coa, to get this pallet booked out please take the label to quality to be released then you can book-out.`,
      data: [],
    };
  }
  // COA-blocked: drive the scanner to release the label before booking out.
  // NOTE(review): cmd89 semantics come from the scanner protocol — confirm.
  if (label.data[0].blockingReason) {
    await scanner.scan("AlplaPRODcmd89");
    await scanner.scan(`${label.data[0].barcode}`);
  }
  const url = await prodEndpointCreation(
    "/public/v1.1/Manufacturing/ProductionControlling/BookOut",
  );
  const SSCC = await createSSCC(runningNr);
  const bookOutData = {
    // slice(2): the API expects the SSCC without its 2-char prefix — TODO confirm
    sscc: SSCC.slice(2),
    scannerId: "666",
  };
  try {
    const results = await axios.post(url, bookOutData, {
      headers: {
        "X-API-Key": process.env.TEC_API_KEY || "",
        "Content-Type": "application/json",
      },
    });
    if (results.data.Errors) {
      return {
        success: false,
        status: 400,
        message: results.data.Errors.Error.Description,
      };
    }
    // Record the command for auditing. Deliberately best-effort: a logging
    // failure must not fail a book-out that already succeeded upstream.
    await tryCatch(
      db.insert(commandLog).values({
        commandUsed: "book out",
        bodySent: data,
        reasonUsed: reason,
      }),
    );
    return {
      success: true,
      message: `${runningNr} was booked out`,
      status: results.status,
    };
  } catch (error: any) {
    console.log(bookOutData);
    return {
      success: false,
      status: 400,
      message: error.response?.data,
      data: error.response?.data,
    };
  }
};

View File

@@ -0,0 +1,96 @@
import axios from "axios";
import { db } from "../../../../../database/dbclient.js";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
type Data = {
runningNr: number;
laneID: number;
};
export const relatePallet = async (data: Data) => {
const { runningNr, laneID } = data;
// replace the rn
// console.log(data);
// create the url to post
// do we have warehousing turned on?
const { data: feature, error: featureError } = (await tryCatch(
query(
`SELECT [Id]
,[Feature]
,[Enabled]
,[ActivationDate]
FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
"feature switch check",
),
)) as any;
let prodUrl = "/public/v1.0/Warehousing/Relocate";
if (featureError) {
prodUrl = "/public/v1.0/Warehousing/Relocate";
}
if (feature?.data.length > 0) {
prodUrl = "/public/v1.1/Warehousing/Unit/Relocate";
}
// 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel","
// 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"
let url = await prodEndpointCreation(prodUrl);
const SSCC = await createSSCC(runningNr);
const consumeSomething = {
ScannerId: 999,
laneId: laneID,
sscc: SSCC.slice(2),
};
console.log(consumeSomething);
try {
const results = await axios.post(url, consumeSomething, {
headers: {
"X-API-Key": process.env.TEC_API_KEY || "",
"Content-Type": "application/json",
},
});
if (results.data.Errors) {
return {
success: false,
message: results.data.Errors.Error.Description,
};
}
if (results.data.Result !== 0 || results.data.data.length <= 0) {
return {
success: false,
message: results.data.Message,
};
}
const { data: commandL, error: ce } = await tryCatch(
db.insert(commandLog).values({
commandUsed: "relocate",
bodySent: data,
}),
);
return {
success: true,
message: "Pallet Was Relocated",
status: results.status,
};
} catch (error: any) {
console.log(error);
return {
success: false,
status: 200,
message: error.response?.data.errors[0].message,
};
}
};

View File

@@ -1,41 +1,14 @@
import axios from "axios";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { lstAuth } from "../../../../index.js";
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { db } from "../../../../../database/dbclient.js"; import { db } from "../../../../../database/dbclient.js";
import net from "net"; import { commandLog } from "../../../../../database/schema/commandLog.js";
import { scanner } from "../../../../globalUtils/scannerConnect.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js"; import { query } from "../../../sqlServer/prodSqlServer.js";
import { labelInfo } from "../../../sqlServer/querys/warehouse/labelInfo.js"; import { labelInfo } from "../../../sqlServer/querys/warehouse/labelInfo.js";
import { settings } from "../../../../../database/schema/settings.js";
import { eq } from "drizzle-orm";
import { serverData } from "../../../../../database/schema/serverData.js";
export const removeAsNonReusable = async (data: any) => { export const removeAsNonReusable = async (data: any) => {
// const removalUrl = await prodEndpointCreation(
// "/public/v1.0/Warehousing/RemoveAsNonReusableMaterial"
// );
// const sscc = await createSSCC(data.runningNr);
// const { data: remove, error } = await tryCatch(
// axios.post(
// removalUrl,
// { scannerId: "500", sscc: sscc.slice(2) },
// {
// headers: { Authorization: `Basic ${lstAuth}` },
// }
// )
// );
// use a scanner tcp connection to trigger this process
const STX = "\x02";
const ETX = "\x03";
const scanner = new net.Socket();
let stage = 0;
// get the label info // get the label info
const { data: label, error: labelError } = (await tryCatch( const { data: label, error: labelError } = (await tryCatch(
query(labelInfo.replaceAll("[runningNr]", data.runningNr), "Label Info") query(labelInfo.replaceAll("[runningNr]", data.runningNr), "Label Info"),
)) as any; )) as any;
if (label.data[0].stockStatus === "notOnStock") { if (label.data[0].stockStatus === "notOnStock") {
@@ -46,60 +19,17 @@ export const removeAsNonReusable = async (data: any) => {
}; };
} }
// get the server ip based on the token. if (label.data[0].blockingReason) {
const setting = await db.select().from(settings);
const plantInfo = await db.select().from(serverData);
const plantToken = setting.filter((n: any) => n.name === "plantToken");
const scannerID = setting.filter((n: any) => n.name === "scannerID");
const scannerPort = setting.filter((n: any) => n.name === "scannerPort");
const plantData = plantInfo.filter(
(p: any) => p.plantToken === plantToken[0].value
);
scanner.connect(
parseInt(scannerPort[0].value),
plantData[0].idAddress!,
async () => {
// need to get the ip from the server data and scanner port
//console.log(`connected to scanner`);
scanner.write(`${STX}${scannerID[0].value}@AlplaPRODcmd23${ETX}`);
}
);
scanner.on("data", (data) => {
const response = data.toString();
//console.log("Received:", response.trimStart());
if (stage === 0) {
stage = 1;
scanner.write(
`${STX}${scannerID[0].value}@${label.data[0].Barcode}${ETX}`
);
} else if (stage === 1) {
scanner.end();
}
});
scanner.on("close", () => {
//console.log("Connection closed");
scanner.destroy();
});
scanner.on("error", (err) => {
//console.error("Scanner error:", err);
scanner.destroy();
return { return {
success: false, success: false,
message: `The label: ${data.runningNr} encountering an error while being removed, please try again`, status: 400,
message: `${data.runningNr} is currently blocked, to get this pallet removed please take the label to quality to be released then you can remove.`,
data: [], data: [],
}; };
}); }
// if (error) { await scanner.scan("AlplaPRODcmd23");
// //console.log(error); await scanner.scan(`${label.data[0].barcode}`);
// return {
// success: false,
// message: `There was an error removing ${data.runningNr}`,
// data: [],
// };
// }
let reason = data.reason || ""; let reason = data.reason || "";
delete data.reason; delete data.reason;
@@ -109,7 +39,7 @@ export const removeAsNonReusable = async (data: any) => {
commandUsed: "removeAsNonReusable", commandUsed: "removeAsNonReusable",
bodySent: data, bodySent: data,
reasonUsed: reason, reasonUsed: reason,
}) }),
); );
return { return {

View File

@@ -1,6 +1,7 @@
import axios from "axios"; import axios from "axios";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js"; import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
export const postAdjustment = async (data: any) => { export const postAdjustment = async (data: any) => {
if (data.warehouseId === undefined) { if (data.warehouseId === undefined) {
@@ -35,9 +36,30 @@ export const postAdjustment = async (data: any) => {
quantity: data.quantity, quantity: data.quantity,
}; };
let url = await prodEndpointCreation( // do we have warehousing turned on?
"/public/v1.0/Warehousing/AdjustSiloStockLevel", const { data: feature, error: featureError } = (await tryCatch(
); query(
`SELECT [Id]
,[Feature]
,[Enabled]
,[ActivationDate]
FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
"feature switch check",
),
)) as any;
let prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
if (featureError) {
prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
}
if (feature?.data.length > 0) {
prodUrl = "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel";
}
// 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel","
// 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"
let url = await prodEndpointCreation(prodUrl);
const { data: silo, error } = await tryCatch( const { data: silo, error } = await tryCatch(
axios.post(url, siloAdjustment, { axios.post(url, siloAdjustment, {

View File

@@ -1,7 +1,9 @@
import { OpenAPIHono } from "@hono/zod-openapi"; import { OpenAPIHono } from "@hono/zod-openapi";
import { migrateAdjustments } from "./controller/siloAdjustments/migrateAdjustments.js"; import { migrateAdjustments } from "./controller/siloAdjustments/migrateAdjustments.js";
import { getLanesToCycleCount } from "./controller/warehouse/cycleCountChecks/cyclecountCheck.js"; import { getLanesToCycleCount } from "./controller/warehouse/cycleCountChecks/cyclecountCheck.js";
import attachSilo from "./route/attachSilo.js"; import attachSilo from "./route/attachSilo.js";
import bookOutPallet from "./route/bookout.js";
import comsumeMaterial from "./route/consumeMaterial.js"; import comsumeMaterial from "./route/consumeMaterial.js";
import detachSilo from "./route/detachSilo.js"; import detachSilo from "./route/detachSilo.js";
import postBulkOrders from "./route/dm/bulkOrdersIn.js"; import postBulkOrders from "./route/dm/bulkOrdersIn.js";
@@ -16,6 +18,7 @@ import outbound from "./route/getOutbound.js";
import getPPOO from "./route/getPPOO.js"; import getPPOO from "./route/getPPOO.js";
import getConnectionType from "./route/getSiloConnectionData.js"; import getConnectionType from "./route/getSiloConnectionData.js";
import getSSCC from "./route/getSSCCNumber.js"; import getSSCC from "./route/getSSCCNumber.js";
import relocate from "./route/relocate.js";
import removeAsNonReable from "./route/removeAsNonReusable.js"; import removeAsNonReable from "./route/removeAsNonReusable.js";
import returnMat from "./route/returnMaterial.js"; import returnMat from "./route/returnMaterial.js";
import createSiloAdjustment from "./route/siloAdjustments/createSiloAdjustment.js"; import createSiloAdjustment from "./route/siloAdjustments/createSiloAdjustment.js";
@@ -28,7 +31,7 @@ const app = new OpenAPIHono();
const routes = [ const routes = [
comsumeMaterial, comsumeMaterial,
returnMat, returnMat,
relocate,
// silo // silo
createSiloAdjustment, createSiloAdjustment,
postComment, postComment,
@@ -55,6 +58,7 @@ const routes = [
// logisitcs // logisitcs
removeAsNonReable, removeAsNonReable,
getSSCC, getSSCC,
bookOutPallet,
] as const; ] as const;
// app.route("/server", modules); // app.route("/server", modules);

View File

@@ -0,0 +1,87 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { verify } from "hono/jwt";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
//import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { bookOutPallet } from "../controller/commands/bookout.js";
const app = new OpenAPIHono();
// Shared response envelope used by every status code on this route.
const responseSchema = z.object({
  success: z.boolean().optional().openapi({ example: true }),
  message: z.string().optional().openapi({ example: "user access" }),
});
app.openapi(
  createRoute({
    tags: ["logistics"],
    // FIX: summary/description/response text previously said "Consumes
    // material" / "stopped" — copy-pasted from the consume route.
    summary: "Books a pallet out of stock",
    method: "post",
    path: "/bookout",
    //middleware: authMiddleware,
    description:
      "Provided a running number you can book the pallet out of stock.",
    responses: {
      200: {
        content: { "application/json": { schema: responseSchema } },
        description: "Pallet booked out",
      },
      400: {
        content: { "application/json": { schema: responseSchema } },
        description: "Failed to book out the pallet",
      },
      401: {
        content: { "application/json": { schema: responseSchema } },
        description: "Failed to book out the pallet",
      },
    },
  }),
  async (c) => {
    // A malformed or missing JSON body is a client error.
    const { data, error } = await tryCatch(c.req.json());
    if (error) {
      return c.json(
        {
          success: false,
          message: "Missing data please try again",
          error,
        },
        400,
      );
    }
    // Record the hit for API usage tracking.
    apiHit(c, { endpoint: "/bookout", lastBody: data });
    //const authHeader = c.req.header("Authorization");
    //const token = authHeader?.split("Bearer ")[1] || "";
    //const payload = await verify(token, process.env.JWT_SECRET!);
    try {
      const bookout = await bookOutPallet(data);
      console.log("from booout:", bookout);
      return c.json(
        {
          success: bookout?.success,
          message: bookout?.message,
          // optional-chain for consistency with the fields above so a
          // nullish controller result cannot throw here
          data: bookout?.data,
        },
        200,
      );
    } catch (error) {
      console.log("from error:", error);
      return c.json(
        {
          success: false,
          message: "Missing data please try again",
          error,
        },
        400,
      );
    }
  },
);
export default app;

View File

@@ -0,0 +1,80 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { relatePallet } from "../controller/commands/relocated.js";
const app = new OpenAPIHono();
// Shared response envelope used by every status code on this route.
const responseSchema = z.object({
  success: z.boolean().optional().openapi({ example: true }),
  message: z.string().optional().openapi({ example: "user access" }),
});
app.openapi(
  createRoute({
    tags: ["logistics"],
    // FIX: summary/description/response text previously said "Consumes
    // material" / "stopped" — copy-pasted from the consume route.
    summary: "Relocates a pallet",
    method: "post",
    path: "/relocate",
    //middleware: authMiddleware,
    description:
      "Provided a running number you can relocate a pallet to a new location.",
    responses: {
      200: {
        content: { "application/json": { schema: responseSchema } },
        description: "Pallet relocated",
      },
      400: {
        content: { "application/json": { schema: responseSchema } },
        description: "Failed to relocate the pallet",
      },
      401: {
        content: { "application/json": { schema: responseSchema } },
        description: "Failed to relocate the pallet",
      },
    },
  }),
  async (c) => {
    // A malformed or missing JSON body is a client error.
    const { data, error } = await tryCatch(c.req.json());
    if (error) {
      return c.json(
        {
          success: false,
          message: "Missing data please try again",
          error,
        },
        400,
      );
    }
    // Record the hit for API usage tracking.
    apiHit(c, { endpoint: "/relocate", lastBody: data });
    //const authHeader = c.req.header("Authorization");
    //const token = authHeader?.split("Bearer ")[1] || "";
    //const payload = await verify(token, process.env.JWT_SECRET!);
    try {
      const consume = await relatePallet(data);
      return c.json(
        { success: consume?.success, message: consume?.message },
        200,
      );
    } catch (error) {
      return c.json(
        {
          success: false,
          message: "Missing data please try again",
          error,
        },
        400,
      );
    }
  },
);
export default app;

View File

@@ -4,8 +4,8 @@ import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js"; import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js"; import { query } from "../../../sqlServer/prodSqlServer.js";
import { sendEmail } from "../sendMail.js";
import { bow2incoming } from "../../../sqlServer/querys/notifications/bow2henkel.js"; import { bow2incoming } from "../../../sqlServer/querys/notifications/bow2henkel.js";
import { sendEmail } from "../sendMail.js";
const notification = async (notifyData: any) => { const notification = async (notifyData: any) => {
/** /**
@@ -19,7 +19,7 @@ const notification = async (notifyData: any) => {
"error", "error",
"reprinting", "reprinting",
"notify", "notify",
`There are no emails set for ${notifyData.name}` `There are no emails set for ${notifyData.name}`,
); );
return; return;
} }
@@ -28,12 +28,9 @@ const notification = async (notifyData: any) => {
const { data: l, error: labelError } = await tryCatch( const { data: l, error: labelError } = await tryCatch(
query( query(
bow2incoming.replace( bow2incoming.replace("[time]", notifyData.notifiySettings.processTime),
"[time]", "Label Reprints",
notifyData.notifiySettings.processTime
), ),
"Label Reprints"
)
); );
const labels: any = l?.data as any; const labels: any = l?.data as any;
if (labelError) { if (labelError) {
@@ -41,7 +38,7 @@ const notification = async (notifyData: any) => {
"error", "error",
"reprinting", "reprinting",
"notify", "notify",
`Failed to get the labels: ${labelError}` `Failed to get the labels: ${labelError}`,
); );
return; return;
} }
@@ -65,7 +62,7 @@ const notification = async (notifyData: any) => {
"error", "error",
"reprinting", "reprinting",
"notify", "notify",
"Failed to send email, will try again on next interval" "Failed to send email, will try again on next interval",
); );
return; return;
} }
@@ -88,7 +85,7 @@ const notification = async (notifyData: any) => {
prodID: labels[0].IdEtikettenHistorie, prodID: labels[0].IdEtikettenHistorie,
}, },
}) })
.where(eq(notifications.name, notifyData.name)) .where(eq(notifications.name, notifyData.name)),
); );
} else { } else {
return; return;

View File

@@ -0,0 +1,108 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import {
  type SqlQuery,
  sqlQuerySelector,
} from "../../../sqlServer/utils/querySelector.utils.js";
import { sendEmail } from "../sendMail.js";
// NOTE(review): unused leftover from the reprint notification this file was
// based on; kept exported so any existing importer keeps compiling.
export interface Labels {
  IdEtikettenHistorie?: number;
}
/**
 * Cycle-count watchdog notification.
 *
 * Loads `cycleCountCheck.query`, finds cycle counts that have been active
 * for longer than `notifyData.checkInterval` minutes, emails the configured
 * recipients, and stamps `lastRan` on the notification row.
 *
 * FIX: log module tags and the query-failure message previously said
 * "reprinting" / "labels" — copy-pasted from the reprint check — and the
 * lastRan update error was silently discarded.
 */
const notification = async (notifyData: any) => {
  createLog("debug", "cycleCount", "notify", `monitoring ${notifyData.name}`);
  // No recipients configured — nothing useful to do.
  if (notifyData.emails === "") {
    createLog(
      "error",
      "cycleCount",
      "notify",
      `There are no emails set for ${notifyData.name}`,
    );
    return;
  }
  // Load the SQL from file; bail out if the query file cannot be read.
  const cycleCountCheck = sqlQuerySelector("cycleCountCheck.query") as SqlQuery;
  if (!cycleCountCheck.success) {
    console.log("Failed to load the query: ", cycleCountCheck.message);
    return;
  }
  const { data: c, error: cError } = await tryCatch(
    query(
      cycleCountCheck.query.replace("[timeTest]", notifyData.checkInterval),
      "Cycle count check",
    ),
  );
  const cycle: any = c?.data ?? ([] as any);
  if (cError) {
    createLog(
      "error",
      "cycleCount",
      "notify",
      `Failed to get the cycle counts: ${cError}`,
    );
    return;
  }
  // Nothing over the threshold — quietly wait for the next interval.
  if (cycle.length === 0) {
    return;
  }
  // At least one cycle count exceeded the threshold — send the alert email.
  const emailSetup = {
    email: notifyData.emails,
    subject: `Alert! RowBlocked for more than ${notifyData.checkInterval} min(s)`,
    template: "cycleCountCheck",
    context: {
      checkTime: notifyData.checkInterval,
      items: cycle,
    },
  };
  const sentEmail = await sendEmail(emailSetup);
  if (!sentEmail.success) {
    createLog(
      "error",
      "cycleCount",
      "notify",
      "Failed to send email, will try again on next interval",
    );
    return;
  }
  // Stamp lastRan so the scheduler knows the alert went out.
  const { error: updateError } = await tryCatch(
    db
      .update(notifications)
      .set({
        lastRan: sql`NOW()`,
      })
      .where(eq(notifications.name, notifyData.name)),
  );
  if (updateError) {
    // Previously this failure was swallowed; surface it so a stuck lastRan
    // (and the resulting repeat emails) can be diagnosed.
    createLog(
      "error",
      "cycleCount",
      "notify",
      `Failed to update lastRan for ${notifyData.name}: ${updateError}`,
    );
  }
};
export default notification;

View File

@@ -1,21 +1,21 @@
import { isBefore } from "date-fns"; import { isBefore } from "date-fns";
import { db } from "../../../../../database/dbclient.js";
import { fifoIndex } from "../../../../../database/schema/fifoIndex.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js"; import { query } from "../../../sqlServer/prodSqlServer.js";
import { currentInv } from "../../../sqlServer/querys/notifications/fifoIndex/currentInv.js"; import { currentInv } from "../../../sqlServer/querys/notifications/fifoIndex/currentInv.js";
import { shippedPallets } from "../../../sqlServer/querys/notifications/fifoIndex/shippedPallets.js"; import { shippedPallets } from "../../../sqlServer/querys/notifications/fifoIndex/shippedPallets.js";
import { db } from "../../../../../database/dbclient.js";
import { fifoIndex } from "../../../../../database/schema/fifoIndex.js";
export default async function fifoIndexCheck() { export default async function fifoIndexCheck() {
/** /**
* getting the shipped pallets * getting the shipped pallets
*/ */
const { data: shipped, error: eShipped } = await tryCatch( const { data: shipped, error: eShipped } = await tryCatch(
query(shippedPallets, "notify shipped pallets") query(shippedPallets, "notify shipped pallets"),
); );
const { data: currentStuff, error: eCurrentInv } = await tryCatch( const { data: currentStuff, error: eCurrentInv } = await tryCatch(
query(currentInv, "notify shipped pallets") query(currentInv, "notify shipped pallets"),
); );
// console.log(shipped?.data[2]); // console.log(shipped?.data[2]);
@@ -24,14 +24,14 @@ export default async function fifoIndexCheck() {
/** /**
* We want to check if the each shippened pallet is out of fifo * We want to check if the each shippened pallet is out of fifo
*/ */
const check = shipped?.data.map((n: any) => { const check: any = shipped?.data.map((n: any) => {
/** /**
* Returns all data so we know if we are in or out. * Returns all data so we know if we are in or out.
*/ */
//check if there are pallets older than the current one we are mapped on. //check if there are pallets older than the current one we are mapped on.
const fifoCheck = currentStuff?.data.filter( const fifoCheck = currentStuff?.data.filter(
(i: any) => isBefore(i.prodDate, n.prodDate) && i.av === n.av (i: any) => isBefore(i.prodDate, n.prodDate) && i.av === n.av,
); ) as any;
//console.log(fifoCheck.length); //console.log(fifoCheck.length);
if (fifoCheck.length > 0) { if (fifoCheck.length > 0) {
// console.log("Out of fifo", { // console.log("Out of fifo", {
@@ -83,7 +83,7 @@ export default async function fifoIndexCheck() {
/** /**
* add the data to the db * add the data to the db
*/ */
for (let i = 0; i < check.length; i++) { for (let i = 0; i < check!.length; i++) {
const { data: dbInsert, error: dbE } = await tryCatch( const { data: dbInsert, error: dbE } = await tryCatch(
db db
.insert(fifoIndex) .insert(fifoIndex)
@@ -95,7 +95,7 @@ export default async function fifoIndexCheck() {
fifoFollowed: check[i].fifoFollowed, fifoFollowed: check[i].fifoFollowed,
add_Date: check[i].add_Date, add_Date: check[i].add_Date,
}) })
.onConflictDoNothing() .onConflictDoNothing(),
); );
} }
@@ -105,7 +105,7 @@ export default async function fifoIndexCheck() {
data: { data: {
palletsOut: check, palletsOut: check,
totalShipped: shipped?.data.length, totalShipped: shipped?.data.length,
inFifo: shipped?.data.length - totalOut, inFifo: shipped!.data.length - totalOut,
outOfFifoData: outOfFifo, outOfFifoData: outOfFifo,
}, },
}; };

View File

@@ -0,0 +1,183 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js";
import { sendEmail } from "../sendMail.js";
// Re-entrancy latch: the monitor can fire again while a previous run is
// still awaiting SQL/email I/O; overlapping runs are skipped instead of
// sending duplicate emails.
let running = false;
/**
 * Plant-to-plant EDI notification.
 *
 * Finds newly created Alpla plant-to-plant BOLs, emails the receiving
 * plant (plus the default recipients) the pallet list for each BOL, and
 * records the processed BOL numbers so they are not re-sent.
 *
 * FIX: several early returns and the bError path used to exit without
 * clearing `running`, permanently latching the guard so the notification
 * never ran again; the try/finally now guarantees release. Also fixed the
 * dead `joinBols ?? 500` fallback — "" is not nullish, so an empty ignore
 * list produced an invalid `not in ()` clause.
 */
export default async function platToPlantEdi(notifyData: any) {
  createLog("info", "plantToPlant", "notify", `monitoring ${notifyData.name}`);
  if (running) {
    createLog(
      "info",
      "plantToPlant",
      "notify",
      `Notifcation ${notifyData.name} is already running skipping`,
    );
    return;
  }
  running = true;
  try {
    const { data: noti, error: notiError } = (await tryCatch(
      db
        .select()
        .from(notifications)
        .where(eq(notifications.name, notifyData.name)),
    )) as any;
    if (notiError) {
      createLog(
        "error",
        "edi",
        "notify",
        "Error in getting the notification data",
      );
      // Without the notification row there are no settings to work with;
      // previously execution fell through and crashed on noti[0].
      return {
        success: false,
        message: "Error in getting the notification data",
        data: notiError,
      };
    }
    const checkBol = sqlQuerySelector("checkBol.query");
    if (!checkBol.success) {
      createLog("error", "edi", "notify", "Error in getting the bol query data");
    }
    const pLinkedB = sqlQuerySelector("palletsLinkedToBol.query");
    if (!pLinkedB.success) {
      createLog("error", "edi", "notify", "Error in getting the bol query data");
    }
    // BOLs we already emailed about — capped at the most recent 15 below.
    const ignoreBols: string[] = noti[0]?.notifiySettings?.processedBol ?? [];
    // Fall back to the seed placeholder bol 500 when the list is empty so
    // the generated `not in (...)` clause stays valid T-SQL.
    const joinBols = ignoreBols.length > 0 ? ignoreBols.join(",") : "500";
    // includeAll comments out the Alpla-address filter in the query text.
    const updateQuery = noti[0]?.notifiySettings?.includeAll
      ? checkBol?.query?.replace(
          "and a.bezeichnung like '%Alpla%'",
          "--and a.bezeichnung like '%Alpla%'",
        )
      : checkBol?.query;
    const { data: b, error: bError } = (await tryCatch(
      query(
        updateQuery
          ?.replace("[timeCheck]", noti[0]?.checkInterval ?? "30")
          .replace("[ignoreBols]", joinBols) ?? "",
        "Check bol",
      ),
    )) as any;
    if (bError) {
      return {
        success: false,
        message: "Error getting newly created bols",
        data: bError,
      };
    }
    if (b.data.length === 0) {
      return {
        success: true,
        message: "No new bols have been created",
        data: [],
      };
    }
    // Group the idladeplanung rows by BOL number so we email once per BOL.
    const planedByBol = new Map<string, string[]>();
    for (const row of b.data) {
      if (!planedByBol.has(row.bol)) {
        planedByBol.set(row.bol, []);
      }
      planedByBol.get(row.bol)!.push(String(row.idladeplanung));
    }
    for (const [bolNumber, idList] of planedByBol.entries()) {
      const joinedIdLadeplanung = idList.join(",");
      // Pallets sitting on this BOL's load plans.
      const { data: pallets } = await tryCatch(
        query(
          pLinkedB?.query?.replace(
            "[palLinkedToBol]",
            joinedIdLadeplanung ?? "0",
          ) ?? "",
          "Get Pallets linked in the bol",
        ),
      );
      // NOTE(review): the address is taken from the FIRST result row, not
      // this bol's own row — assumes one batch shares one address; confirm.
      if (b.data[0].addressId === "") return;
      ignoreBols.push(bolNumber);
      // Keep only the 15 most recently processed bols.
      if (ignoreBols.length > 15) {
        ignoreBols.splice(0, ignoreBols.length - 15);
      }
      // Look up the receiving plant's contact email for this address.
      // (was `const checkBol = ...`, shadowing the outer query handle)
      const addressQuery = sqlQuerySelector("addressInfo.query");
      const { data: address } = (await tryCatch(
        query(
          addressQuery?.query?.replace(
            "[customerAddress]",
            b.data[0].addressId ?? "0",
          ) ?? "",
          "Get Pallets linked in the bol",
        ),
      )) as any;
      if (noti[0]?.emails === "") return; // no default emails
      // Default recipients plus the receiving plant's contact.
      const emailSetup = {
        email: `${noti[0]?.emails};${address.data[0].email ?? ""}`,
        subject: `New EDI transfer Created for BOL: ${bolNumber}`,
        template: "plantToPlantEdi",
        context: {
          items: pallets?.data ?? [],
          bol: bolNumber,
        },
      };
      await sendEmail(emailSetup);
      // Persist lastRan and the processed bols so the next run skips them.
      await db
        .update(notifications)
        .set({
          lastRan: sql`NOW()`,
          notifiySettings: {
            ...noti[0]?.notifiySettings,
            processedBol: ignoreBols,
          },
        })
        .where(eq(notifications.name, notifyData.name));
    }
    return {
      success: true,
      message: "All bols have been processed",
      data: [ignoreBols],
    };
  } finally {
    // Always release the latch, whatever path exited the function.
    running = false;
  }
}

View File

@@ -70,7 +70,7 @@ export default async function tooManyErrors(notifyData: any) {
} ${errorLogData.length > 1 ? "errors" : "error"} in the last ${notifyData.checkInterval} min`, } ${errorLogData.length > 1 ? "errors" : "error"} in the last ${notifyData.checkInterval} min`,
template: "tooManyErrors", template: "tooManyErrors",
context: { context: {
data: errorLogData, data: errorLogData.slice(0, 100),
count: notifyData.notifiySettings.errorCount, count: notifyData.notifiySettings.errorCount,
time: notifyData.checkInterval, time: notifyData.checkInterval,
}, },

View File

@@ -45,57 +45,57 @@ export const sendEmail = async (data: any): Promise<any> => {
}; };
} }
// get the plantToken // get the plantToken
const server = settingData.filter((n) => n.name === "server"); //const server = settingData.filter((n) => n.name === "server");
if ( // if (
server[0].value === "localhostx" && // server[0].value === "localhostx" &&
process.env.EMAIL_USER && // process.env.EMAIL_USER &&
process.env.EMAIL_PASSWORD // process.env.EMAIL_PASSWORD
) { // ) {
transporter = nodemailer.createTransport({ // transporter = nodemailer.createTransport({
service: "gmail", // service: "gmail",
host: "smtp.gmail.com", // host: "smtp.gmail.com",
port: 465, // port: 465,
auth: {
user: process.env.EMAIL_USER,
pass: process.env.EMAIL_PASSWORD,
},
//debug: true,
});
// update the from email
fromEmail = process.env.EMAIL_USER;
} else {
// convert to the correct plant token.
const plantToken = settingData.filter((s) => s.name === "plantToken");
let host = `${plantToken[0].value}-smtp.alpla.net`;
const testServers = ["test1", "test2", "test3"];
if (testServers.includes(plantToken[0].value)) {
host = "USMCD1-smtp.alpla.net";
}
if (plantToken[0].value === "usiow2") {
host = "USIOW1-smtp.alpla.net";
}
transporter = nodemailer.createTransport({
host: host,
port: 25,
rejectUnauthorized: false,
//secure: false,
// auth: { // auth: {
// user: "alplaprod", // user: process.env.EMAIL_USER,
// pass: "obelix", // pass: process.env.EMAIL_PASSWORD,
// }, // },
debug: true, // //debug: true,
} as SMTPTransport.Options); // });
// update the from email // // update the from email
fromEmail = `noreply@alpla.com`; // fromEmail = process.env.EMAIL_USER;
} //} else {
// convert to the correct plant token.
//const plantToken = settingData.filter((s) => s.name === "plantToken");
// let host = `${plantToken[0].value}-smtp.alpla.net`;
// const testServers = ["test1", "test2", "test3"];
// if (testServers.includes(plantToken[0].value)) {
// host = "USMCD1-smtp.alpla.net";
// }
// if (plantToken[0].value === "usiow2") {
// host = "USIOW1-smtp.alpla.net";
// }
transporter = nodemailer.createTransport({
host: "smtp.azurecomm.net",
port: 587,
//rejectUnauthorized: false,
tls: {
minVersion: "TLSv1.2",
},
auth: {
user: "donotreply@mail.alpla.com",
pass: process.env.SMTP_PASSWORD,
},
debug: true,
});
fromEmail = `DoNotReply@mail.alpla.com`;
//}
// creating the handlbar options // creating the handlbar options
const viewPath = path.resolve( const viewPath = path.resolve(

View File

@@ -12,6 +12,7 @@ import blocking from "./routes/qualityBlocking.js";
import sendemail from "./routes/sendMail.js"; import sendemail from "./routes/sendMail.js";
import errorHandling from "./routes/tooManyErrors.js"; import errorHandling from "./routes/tooManyErrors.js";
import { note, notificationCreate } from "./utils/masterNotifications.js"; import { note, notificationCreate } from "./utils/masterNotifications.js";
import { sqlJobCleanUp } from "./utils/notificationSqlCleanup.js";
import { startNotificationMonitor } from "./utils/processNotifications.js"; import { startNotificationMonitor } from "./utils/processNotifications.js";
const app = new OpenAPIHono(); const app = new OpenAPIHono();
@@ -57,6 +58,7 @@ if (notesError) {
setTimeout(() => { setTimeout(() => {
notificationCreate(); notificationCreate();
startNotificationMonitor(); startNotificationMonitor();
sqlJobCleanUp();
}, 5 * 1000); }, 5 * 1000);
export default app; export default app;

View File

@@ -152,6 +152,28 @@ export const note: any = [
errorCount: 10, // change this to something else or leave blank to use the av type errorCount: 10, // change this to something else or leave blank to use the av type
}, },
}, },
{
name: "cycleCountCheck",
description:
"Checks if a cycle count has been active for longer than the defined time.",
checkInterval: 60,
timeType: "min",
emails: "",
active: false,
notifiySettings: {
errorCount: 10, // change this to something else or leave blank to use the av type
},
},
{
name: "platToPlantEdi",
description:
"This is the plant to plant edi that will send an edi to the email once it ships, the emails will be for the receiving plants",
checkInterval: 15,
timeType: "min",
emails: "blake.matthes@alpla.com;Maritza.Hernandez@alpla.com",
active: false,
notifiySettings: { processedBol: [500], includeAll: false },
},
]; ];
export const notificationCreate = async () => { export const notificationCreate = async () => {

View File

@@ -0,0 +1,86 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import {
  type SqlQuery,
  sqlQuerySelector,
} from "../../sqlServer/utils/querySelector.utils.js";
// NOTE(review): reference copy of the disable statement — the version that
// actually runs is loaded from disableJob.query via sqlQuerySelector below.
const cleanUpQuery = `
DECLARE @JobName varchar(max) = '[jobName]'
UPDATE msdb.dbo.sysjobs
SET enabled = 0
WHERE name = @JobName;
`;
// Legacy SQL Agent jobs superseded by lst — disabled at startup.
// (FIX: "Monitor_PM" was listed twice; the loop also dedupes defensively.)
const jobNames: string[] = [
  "monitor_$_lots",
  "monitor_$_lots_2",
  "monitor$lots",
  "Monitor_APO", //listen for people to cry this is no longer a thing
  "Monitor_APO2",
  "Monitor_AutoConsumeMaterials", // TODO: migrate to lst
  "Monitor_AutoConsumeMaterials_iow1",
  "Monitor_AutoConsumeMaterials_iow2",
  "Monitor_BlockedINV_Loc",
  "monitor_inv_cycle",
  "monitor_inv_cycle_1",
  "monitor_inv_cycle_2",
  "monitor_edi_import", // TODO: migrate to lst -- for the query select count(*) from AlplaPROD_test3.dbo.T_EDIDokumente (nolock) where /* IdLieferant > 1 and */ add_date > DATEADD(MINUTE, -30, getdate())
  "Monitor_Lot_Progression",
  "Monitor_Lots", // TODO: migrate to lst -- this should be the one where we monitor the when a lot is assigned if its missing some data.
  "Monitor_MinMax", // TODO:Migrate to lst
  "Monitor_MinMax_iow2",
  "Monitor_PM",
  "Monitor_Purity",
  "monitor_wastebookings", // TODO: Migrate
  "LastPriceUpdate", // not even sure what this is
  "GETLabelsCount", // seems like an old jc job
  "jobforpuritycount", // was not even working correctly
  "Monitor_EmptyAutoConsumLocations", // not sure who uses this one
  "monitor_labelreprint", // Migrated but need to find out who really wants this
  "test", // not even sure why this is active
  "UpdateLastMoldUsed", // old jc inserts data into a table but not sure what its used for not linked to any other alert
  "UpdateWhsePositions3", // old jc inserts data into a table but not sure what its used for not linked to any other alert
  "UpdateWhsePositions4",
  "delete_print", // i think this was in here for when we was having lag prints in iowa1
  "INV_WHSE_1", // something random i wrote long time ago looks like an inv thing to see aged stuff
  "INV_WHSE_2",
  "laneAgeCheck", // another strange one thats been since moved to lst
  "monitor_blocking_2",
  "monitor_blocking", // already in lst
  "monitor_min_inv", // do we still want this one? it has a description of: this checks m-f the min inventory of materials based on the min level set in stock
  "Monitor_MixedLocations",
  "Monitor_PM2",
  "wrong_lots_1",
  "wrong_lots_2",
  "invenotry check", // spelling error one of my stupids
  "monitor_hold_monitor",
  "Monitor_Silo_adjustments",
  "monitor_qualityLocMonitor", // validating with lima this is still needed
];
/**
 * Disables every legacy SQL Agent job in `jobNames` by running the
 * disableJob.query template once per job. Errors are logged per job and do
 * not stop the remaining jobs from being processed.
 */
export const sqlJobCleanUp = async () => {
  // running a query to disable jobs that are moved to lst to be better maintained
  const sqlQuery = sqlQuerySelector("disableJob.query") as SqlQuery;
  if (!sqlQuery.success) {
    console.log("Failed to load the query: ", sqlQuery.message);
    return;
  }
  // Set-dedupe guards against accidental repeats in the list above.
  for (const job of new Set(jobNames)) {
    const { error } = await tryCatch(
      query(
        sqlQuery.query.replace("[jobName]", job),
        `Disabling job: ${job}`,
      ),
    );
    if (error) {
      console.log(error);
    }
  }
};

View File

@@ -0,0 +1,44 @@
{{!--
  cycleCountCheck email template.
  Context: checkTime — the threshold in minutes; items — rows with
  idWarehouse, warehouse, locationId, location, cycleCountStartAt, blockedBy.
  The {{> styles}} partial supplies the shared inline table styling.
--}}
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    {{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
    {{> styles}}
</head>
<body>
    <p>All,</p>
    <p>The below are cycle counts that have been in progress for longer than {{checkTime}} min(s). </p>
  <table >
    <thead>
      <tr>
        <th>WarehouseID</th>
        <th>Warehouse</th>
        <th>LocationID</th>
        <th>Location</th>
        <th>Cycle count Started</th>
        <th>Started by</th>
        {{!-- <th>Downtime finish</th> --}}
      </tr>
    </thead>
    <tbody>
      {{!-- one table row per overdue cycle count --}}
      {{#each items}}
      <tr>
        <td>{{idWarehouse}}</td>
        <td>{{warehouse}}</td>
        <td>{{locationId}}</td>
        <td>{{location}}</td>
        <td>{{cycleCountStartAt}}</td>
        <td>{{blockedBy}}</td>
        {{!-- <td>{{dtEnd}}</td> --}}
      </tr>
      {{/each}}
    </tbody>
  </table>
    <div>
        <p>Thank you,</p>
        <p>LST Team</p>
    </div>
</body>
</html>

View File

@@ -0,0 +1,46 @@
{{!--
  plantToPlantEdi email template.
  Context: bol — the BOL number; items — pallet rows with runningNr,
  article, alias, lotNumber, qty.
  FIX: replaced invalid `<br></br>` with a self-closing `<br />`.
--}}
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    {{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
    {{> styles}}
</head>
<body>
    <p>All,</p>
    <p>BOL: {{bol}} was created with the below pallets.</p>
    <p>Please head to stock and import the pallets via the normal incoming goods process (now/immediately).</p>
    <p>When encountering a discrepancy in pallets/cages received, please correct this after the pallets have been imported.</p>
    <p>Due to these being plant to plant shipments, the only way to correct this is to bring them in then undo the incoming goods process.</p>
    <br />
  <table >
    <thead>
      <tr>
        <th>Running Number</th>
        <th>AV</th>
        <th>Description</th>
        <th>Lot number</th>
        <th>Quantity</th>
        {{!-- <th>Downtime finish</th> --}}
      </tr>
    </thead>
    <tbody>
      {{!-- one table row per pallet on the BOL --}}
      {{#each items}}
      <tr>
        <td>{{runningNr}}</td>
        <td>{{article}}</td>
        <td>{{alias}}</td>
        <td>{{lotNumber}}</td>
        <td>{{qty}}</td>
        {{!-- <td>{{dtEnd}}</td> --}}
      </tr>
      {{/each}}
    </tbody>
  </table>
    <div>
        <p>Thank you,</p>
        <p>LST Team</p>
    </div>
</body>
</html>

View File

@@ -11,14 +11,14 @@ export const zechettiConnect = () => {
printerId: 22, // grabbed from 2.0 printerId: 22, // grabbed from 2.0
tags: ["N7[0]"], tags: ["N7[0]"],
}, },
{ // {
id: "Zecchetti_2", // id: "Zecchetti_2",
ip: "192.168.193.111", // ip: "192.168.193.111",
slot: 0, // slot: 0,
rpi: 100, // rpi: 100,
printerId: 23, // printerId: 23,
tags: ["N8[0]"], // tags: ["N8[0]"],
}, // },
], ],
}; };

View File

@@ -41,7 +41,7 @@ export const qualityCycle = async () => {
message: "There was an error getting quality request data", message: "There was an error getting quality request data",
}; };
} }
const lstQData: any = data; const lstQData: any = data ?? [];
// get the pallets that currentStat is moved // get the pallets that currentStat is moved
// const res = await runQuery(palletMoveCheck, "palletCheck"); // const res = await runQuery(palletMoveCheck, "palletCheck");

View File

@@ -8,7 +8,7 @@ import { createLog } from "../logger/logger.js";
import { serverSettings } from "../server/controller/settings/getSettings.js"; import { serverSettings } from "../server/controller/settings/getSettings.js";
import { prodSqlConfig } from "./utils/prodServerConfig.js"; import { prodSqlConfig } from "./utils/prodServerConfig.js";
let pool: any; let pool: sql.ConnectionPool;
let connected: boolean = false; let connected: boolean = false;
export const initializeProdPool = async () => { export const initializeProdPool = async () => {
if (!installed) { if (!installed) {
@@ -34,7 +34,9 @@ export const initializeProdPool = async () => {
(n: any) => n.name === "dbServer", (n: any) => n.name === "dbServer",
) as any; ) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`); const serverUp = await checkHostnamePort(
`${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`,
);
if (!serverUp) { if (!serverUp) {
createLog( createLog(
@@ -88,7 +90,8 @@ export const initializeProdPool = async () => {
} }
try { try {
const config = await prodSqlConfig(); const config = await prodSqlConfig();
pool = await sql.connect(config!); pool = new sql.ConnectionPool(config!);
await pool.connect();
createLog( createLog(
"info", "info",
@@ -164,21 +167,23 @@ export async function query(queryToRun: string, name: string) {
const dbServer = serverSettings.filter( const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer", (n: any) => n.name === "dbServer",
) as any; ) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`); // const serverUp = await checkHostnamePort(
// `${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`,
// );
if (!serverUp) { // if (!serverUp) {
createLog( // createLog(
"error", // "error",
"lst", // "lst",
"server", // "server",
`The sql ${dbServer[0].value} is not reachable`, // `Failed to run query due to ${dbServer[0].value} not being reachable.`,
); // );
return { // return {
success: false, // success: false,
message: `The sql ${dbServer[0].value} is not reachable`, // message: `Failed to run query due to ${dbServer[0].value} not being reachable.`,
data: [], // data: [],
}; // };
} // }
if (!connected) { if (!connected) {
createLog( createLog(

View File

@@ -61,7 +61,8 @@ V_Artikel.ArtikelvariantenTypBez= 'LDPE' or
V_Artikel.ArtikelvariantenTypBez= 'PP' or V_Artikel.ArtikelvariantenTypBez= 'PP' or
V_Artikel.ArtikelvariantenTypBez= 'HDPE' or V_Artikel.ArtikelvariantenTypBez= 'HDPE' or
V_Artikel.ArtikelvariantenTypBez= 'PET' or V_Artikel.ArtikelvariantenTypBez= 'PET' or
V_Artikel.ArtikelvariantenTypBez= 'PET-P' V_Artikel.ArtikelvariantenTypBez= 'PET-P' or
V_Artikel.ArtikelvariantenTypBez= 'PET-G'
THEN 'MM' THEN 'MM'
WHEN WHEN
V_Artikel.ArtikelvariantenTypBez='HDPE-Waste' or V_Artikel.ArtikelvariantenTypBez='HDPE-Waste' or

View File

@@ -0,0 +1,14 @@
use [test1_AlplaPROD2.0_Read]
-- Look up shipping/contact details for a single customer address.
-- [customerAddress] is a template placeholder (see sqlQuerySelector) replaced
-- with the human-readable address id before the query is executed.
-- (nolock): dirty read is acceptable for this read-only lookup.
select
humanreadableId as addressId
,ContactEmail as email
,ContactName
,ContactPhoneNumber
,Name
,Street
,City
,ZipCode
--,*
from [masterData].[Address] (nolock)
where humanreadableid = [customerAddress]

View File

@@ -0,0 +1,43 @@
use AlplaPROD_test1
/**
Check if any new Alpla BOLs (bills of lading) were created recently.
Template placeholders replaced before execution:
  [timeCheck]  - look-back window in minutes
  [ignoreBols] - comma-separated list of journal numbers already notified on
Status 62 appears to be the "created/ready" journal status - TODO confirm.
*/
SELECT
x.idladeplanung
,e.idjournal
,e.journalNummer as bol
,e.idjournalstatus
,e.ladeDatum as loadDate
,e.bemerkung
,e.ereporting_idwerk
,e.journalDatum
,a.idadressen as addressId
,a.bezeichnung as addressDescription
,a.strasse as streetAddress
,a.ort as cityState
,a.plz as zipcode
,idauftrag as releaseNumber
--,*
FROM [dbo].[T_EAIJournal] as e with (nolock)
-- pull in the address so we only pull in florence data
left join
[dbo].[T_EAIJournalAdresse] as a with (nolock) on
a.[IdJournalAdresse] = [IdJournalKundenAdresse]
-- get the table to link the pallets to the bol
left join
[dbo].[T_EAIJournalPosition] as x with (nolock) on
x.idjournal = e.idjournal
where idjournalStatus = 62
--and idadressen = 270
and a.bezeichnung like '%Alpla%' -- we only want to monitor for addresses that are linked to alpla.
and JournalDatum > DATEADD(MINUTE, -[timeCheck], GETDATE())
and e.journalNummer not in ([ignoreBols])
and idauftrag > 1 -- this will ignore all incoming goods as we are really only looking for outbound deliveries
order by JournalDatum desc

View File

@@ -0,0 +1,33 @@
/*
Checks that the age of an open inventory (cycle count) does not exceed x time.
[timeTest] is a template placeholder replaced with the threshold in minutes.
Returns one row per warehouse location whose cycle count has been running
longer than @timeCheck minutes (status = 1 = in progress - TODO confirm).
*/
use AlplaPROD_test1
DECLARE @timeCheck INT = [timeTest]
select
w.IdWarenLager as idWarehouse
,w.KurzBezeichnung as warehouse
,b.IdLagerAbteilung as locationId
,x.KurzBezeichnung as 'location'
--,case when b.upd_date < Dateadd(minute, -(@timeCheck * 1.5), getdate()) then 'OVERDUE' else 'In-Progress' end as invStatus
,format(b.Upd_Date, 'M/d/yyyy HH:mm') as cycleCountStartAt
,b.Upd_User as blockedBy
--,*
from [dbo].[V_LagerAbteilungenInventuren] (nolock) as b
-- get the location name
left join
dbo.T_LagerAbteilungen (nolock) as x
on x.IdLagerAbteilung = b.IdLagerAbteilung
-- get the warehouse
left join
dbo.T_WarenLager (nolock) as w
on x.idWarenLager = w.idWarenLager
where status = 1
and b.Upd_Date < Dateadd(minute, -@timeCheck, getdate())

View File

@@ -0,0 +1,8 @@
/*
Disables a SQL Server Agent job by name.
[jobName] is a template placeholder replaced before execution.
sp_update_job is the supported API; the direct sysjobs UPDATE below is kept
commented out as a fallback reference only.
*/
EXEC msdb.dbo.sp_update_job @job_name = N'[jobName]', @enabled = 0;
-- DECLARE @JobName varchar(max) = '[jobName]'
-- UPDATE msdb.dbo.sysjobs
-- SET enabled = 0
-- WHERE name = @JobName;

View File

@@ -0,0 +1,78 @@
/*
This query will return a single running number as long as it is in stock.
To get all data comment out the lfdnr filter in the where statement.
[runningNr] is a template placeholder (when saving in lst it should stay
literally '[runningNr]').
Aggregates pallet/quantity totals per article variant, split into available,
COA-blocked and otherwise-held buckets based on the blocking-defect text.
*/
use AlplaPROD_test1
DECLARE @runningNumber nvarchar(max) = '[runningNr]' -- when saving in lst should be '[runningNr]'
select x.idartikelVarianten as av,
ArtikelVariantenAlias as alias,
x.Lfdnr as runningNumber,
round(sum(EinlagerungsMengeVPKSum),0) as totalPallets,
sum(EinlagerungsMengeSum) as totalPalletQTY,
round(sum(VerfuegbareMengeVPKSum),0) as avaliblePallets,
sum(VerfuegbareMengeSum) as avaliablePalletQTY,
sum(case when c.Description LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as coaPallets,
sum(case when c.Description LIKE '%COA%' then GesperrteMengeSum else 0 end) as coaQTY,
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeVPKSum else 0 end) as heldPallets,
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeSum else 0 end) as heldQTY
,IdProdPlanung as lot
,IdAdressen as addressID,
x.AdressBez as addressDescription
,x.IdLagerAbteilung as locationId
,x.lagerabteilungkurzbez as location
,lot.machine
,produktionsdatummin as productionDate
-- batch code: '728' (presumably the plant prefix - TODO confirm)
-- + last digit of the production year + day-of-year + machine number
,'728'
+ RIGHT(CAST(YEAR(produktionsdatummin) AS varchar(4)), 1)
+ CAST(DATEDIFF(DAY, DATEFROMPARTS(YEAR(produktionsdatummin), 1, 1), produktionsdatummin) + 1 AS varchar(3))
+ CAST(lot.machine AS varchar(10)) as batch
,c.Description as blockingReason
,x.Barcode as barcode
--,*
from dbo.[V_LagerPositionenBarcodes] (nolock) x
left join
dbo.T_EtikettenGedruckt as l(nolock) on
x.Lfdnr = l.Lfdnr AND l.Lfdnr > 1
-- only active blocking defects are relevant
left join
(SELECT *
FROM [dbo].[T_BlockingDefects] where Active = 1) as c
on x.IdMainDefect = c.IdBlockingDefect
/*
get lot and machine info
*/
left join
(select location as machine,
runningnumber as lot
,planstart
,planend
from [test1_AlplaPROD2.0_Read].[productionScheduling].[ProductionLot] (nolock) x
left join
[test1_AlplaPROD2.0_Read].[masterData].[Machine] (nolock) m on
m.id = x.machineid) as lot on
lot.lot = IdProdPlanung
/*
The data below will be controlled by the user in excel by default everything will be passed over
IdAdressen = 3
*/
where IdArtikelTyp = 1
and x.IdWarenlager in (1) -- the pallet must be in ppoo
and x.Lfdnr = @runningNumber -- comment this out when you want to get everything
group by x.idartikelVarianten, ArtikelVariantenAlias, c.Description, IdAdressen,
x.AdressBez , x.Lfdnr,
IdProdPlanung
,x.IdLagerAbteilung
,x.lagerabteilungkurzbez
,lot.machine
,produktionsdatummin
,x.Barcode
order by x.IdArtikelVarianten

View File

@@ -0,0 +1,37 @@
use AlplaPROD_test1
-- Returns the delivery positions (pallets) linked to one or more BOLs.
-- [palLinkedToBol] is a template placeholder replaced with a list of
-- idladeplanung values before execution.
select * from (SELECT
p.[IdLadePlanung]
,p.[Beleg] as lotNumber
,p.[LfdNrJeArtikelKunde] as runningNr
,p.[Barcode]
,p.[ProduktionsDatum] as productionDate
-- NOTE(review): Add_User is aliased as scanDate - looks like it should be
-- Add_Date (which is selected un-aliased right below); confirm intent.
,p.[Add_User] as scanDate
,p.[Add_Date]
,p.[Upd_User]
,p.[Upd_Date]
,p.[IdJournalWarenPosition]
,p.[LieferMenge] as qty
-- ,av.IdArtikelvarianten as article
-- ,av.Bezeichnung as alias
,av.articlehumanreadableid as article
,av.ArticleDescription as alias
--,[SSCC_ReserveZiffer]
--,ROW_NUMBER() OVER (PARTITION BY p.[LfdNrJeArtikelKunde] ORDER BY p.upd_date DESC) AS RowNum
--,*
FROM [dbo].[T_EAIJournalLieferPosition] as p (nolock)
-- left join
-- dbo.T_ProdPlanung as l on
-- l.IdProdPlanung = p.Beleg
-- resolve article info from the 2.0 internal label by running number
left join
[test1_AlplaPROD2.0_Read].labelling.InternalLabel as av on
av.RunningNumber = p.[LfdNrJeArtikelKunde]
) as a
where idladeplanung in ([palLinkedToBol])
--and RowNum = 1
order by runningNr

View File

@@ -29,7 +29,7 @@ left join
alplaprod_test1.dbo.V_LagerPositionenBarcodes (nolock) as l on alplaprod_test1.dbo.V_LagerPositionenBarcodes (nolock) as l on
ext.RunningNumber = l.Lfdnr ext.RunningNumber = l.Lfdnr
WHERE ext.SsccEanRunningNumber IN (@runningNumber) and WHERE ext.RunningNumber IN (@runningNumber) and
ext.RunningNumber NOT IN ( ext.RunningNumber NOT IN (
SELECT RunningNumber FROM [test1_AlplaPROD2.0_Read].[labelling].[InternalLabel] WHERE RunningNumber IN (@runningNumber) SELECT RunningNumber FROM [test1_AlplaPROD2.0_Read].[labelling].[InternalLabel] WHERE RunningNumber IN (@runningNumber)
) )

View File

@@ -10,16 +10,15 @@ export const prodSqlConfig = async () => {
// create dummy type data // create dummy type data
const server = serverSetting.filter((s: any) => s.name === "dbServer"); const server = serverSetting.filter((s: any) => s.name === "dbServer");
const plantToken = serverSetting.filter( const plantToken = serverSetting.filter(
(s: any) => s.name === "plantToken" (s: any) => s.name === "plantToken",
); );
const dbUser = serverSetting.filter((s: any) => s.name === "dbUser"); const dbUser = serverSetting.filter((s: any) => s.name === "dbUser");
// if erroring out double check the password was actually encoded before saving // if erroring out double check the password was actually encoded before saving
const dbPassword = serverSetting.filter( const dbPassword = serverSetting.filter((s: any) => s.name === "dbPass");
(s: any) => s.name === "dbPass"
);
const sqlConfig = { const sqlConfig = {
server: server[0].value, server:
process.env.NODE_ENV !== "development" ? "localhost" : server[0].value,
database: `AlplaPROD_${plantToken[0].value}_cus`, database: `AlplaPROD_${plantToken[0].value}_cus`,
user: dbUser[0].value, user: dbUser[0].value,
password: atob(dbPassword[0].value), password: atob(dbPassword[0].value),
@@ -44,8 +43,8 @@ export const prodSqlConfig = async () => {
"lst", "lst",
"sqlProd", "sqlProd",
`${JSON.stringify( `${JSON.stringify(
error error,
)} "There was an error getting/setting up the config for the prod sql server."` )} "There was an error getting/setting up the config for the prod sql server."`,
); );
} }
}; };

View File

@@ -0,0 +1,28 @@
import { readFileSync } from "fs";
/**
 * Result shape for sqlQuerySelector: the loaded SQL text plus a success
 * flag and a human-readable message.
 */
export type SqlQuery = {
  query: string;
  success: boolean;
  message: string;
};
/**
 * Loads a SQL query template from `../querys/newQueries/<name>.sql`,
 * resolved relative to this module.
 *
 * @param name - base file name of the query (without the `.sql` extension)
 * @returns a SqlQuery; on failure `success` is false and `query` is "".
 */
export const sqlQuerySelector = (name: string): SqlQuery => {
  try {
    const queryFile = readFileSync(
      new URL(`../querys/newQueries/${name}.sql`, import.meta.url),
      "utf8",
    );
    return {
      success: true,
      message: `Query for: ${name}`,
      query: queryFile,
    };
  } catch {
    // Fix: the failure branch previously omitted `query`, so the returned
    // object did not satisfy the declared SqlQuery shape. Return an empty
    // string so callers can rely on the field always being present.
    return {
      success: false,
      message:
        "Error getting the query file, please make sure you have the correct name.",
      query: "",
    };
  }
};

View File

@@ -10,7 +10,8 @@
"dev:front": "cd frontend && npm run dev", "dev:front": "cd frontend && npm run dev",
"dev:db:migrate": "npx drizzle-kit push", "dev:db:migrate": "npx drizzle-kit push",
"dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts", "dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts",
"dev": "concurrently -n \"server,frontend,docs,oldServer\" -c \"#007755,#2f6da3,#DB4FE0, #1F73D1\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\" \"npm run dev:old\"", "dev": "concurrently -n \"server,oldServer\" -c \"#007755, #1F73D1\" \"npm run dev:app\" \"npm run dev:old\"",
"dev:all": "concurrently -n \"server,frontend,docs,oldServer\" -c \"#007755,#2f6da3,#DB4FE0, #1F73D1\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\" \"npm run dev:old\"",
"dev:old": "cd lstV2 && npm run dev", "dev:old": "cd lstV2 && npm run dev",
"copy:docs": "node scripts/lstDocCopy.mjs", "copy:docs": "node scripts/lstDocCopy.mjs",
"build:app": "rimraf dist && npx tsc && node scripts/lstAppMoves.mjs", "build:app": "rimraf dist && npx tsc && node scripts/lstAppMoves.mjs",

View File

@@ -0,0 +1,112 @@
import net from "net";
/**
 * Fake-scanner script: opens a raw TCP connection to the label scanner
 * endpoint and replays a command followed by a list of pallet label barcodes,
 * mimicking the real scanning process so the server books the pallets in and
 * creates the BOL.
 *
 * Framing (as used throughout this script): STX + scannerID + payload + ETX,
 * written as ASCII.
 */
const scannerID = "98@"; // prefix identifying which scanner the frames come from
const scannerCommand = "Alplaprodcmd10"; // to consume all the pallets
// NOTE(review): `lot` is declared but never sent in this script - confirm it
// is intentionally unused here (the cmd112 variant sends it before each label).
const lot = "AlplaPRODchg#00000016700"; // to consume to the lot make sure its showing in 2.0 to be able to consume to it
const labels = [
  "1000000000000000000000000000000005512460",
  "1000000000000000000000000000000005512470",
  "1000000000000000000000000000000005512480",
  "1000000000000000000000000000000005512490",
  "1000000000000000000000000000000005512500",
  "1000000000000000000000000000000005512510",
  "1000000000000000000000000000000005512520",
  "1000000000000000000000000000000005512530",
  "1000000000000000000000000000000005512540",
  "1000000000000000000000000000000005512550",
  "1000000000000000000000000000000005512560",
  "1000000000000000000000000000000005512570",
  "1000000000000000000000000000000005512580",
  "1000000000000000000000000000000005512590",
  "1000000000000000000000000000000005512600",
  "1000000000000000000000000000000005512610",
  "1000000000000000000000000000000005512620",
  "1000000000000000000000000000000005512630",
  "1000000000000000000000000000000005512640",
  "1000000000000000000000000000000005512650",
  "1000000000000000000000000000000005512660",
  "1000000000000000000000000000000005512670",
  "1000000000000000000000000000000005512680",
  "1000000000000000000000000000000005512690",
  "1000000000000000000000000000000005512700",
  "1000000000000000000000000000000005512710",
  "1000000000000000000000000000000005512720",
  "1000000000000000000000000000000005512730",
  "1000000000000000000000000000000005512740",
  "1000000000000000000000000000000005512750",
  "1000000000000000000000000000000005512760",
  "1000000000000000000000000000000005512770",
  "1000000000000000000000000000000005512780",
  "1000000000000000000000000000000005512790",
  "1000000000000000000000000000000005512800",
  "1000000000000000000000000000000005512810",
  "1000000000000000000000000000000005512820",
  "1000000000000000000000000000000005512830",
  "1000000000000000000000000000000005512840",
  "1000000000000000000000000000000005512850",
  "1000000000000000000000000000000005512860",
  "1000000000000000000000000000000005512870",
  "1000000000000000000000000000000005512880",
  "1000000000000000000000000000000005512890",
  "1000000000000000000000000000000005512900",
  "1000000000000000000000000000000005512910",
  "1000000000000000000000000000000005512920",
  "1000000000000000000000000000000005512930",
  "1000000000000000000000000000000005512940",
  "1000000000000000000000000000000005512950",
  "1000000000000000000000000000000005512960",
];
// ASCII control characters framing each message
const STX = "\x02";
const ETX = "\x03";
const scanner = new net.Socket();
scanner.connect(50000, "10.204.0.26", async () => {
  console.log("Connected to scanner");
  // send the consume command before replaying any labels
  let message = Buffer.from(
    `${STX}${scannerID}${scannerCommand}${ETX}`,
    "ascii",
  );
  console.log("Sending:", message.toString("ascii"));
  scanner.write(message);
  await new Promise((resolve) => setTimeout(resolve, 2000));
  // book in all the pallets in the array
  // NOTE(review): two consecutive 2s waits (4s total) before the first label -
  // confirm the doubled delay is intentional and not a leftover.
  await new Promise((resolve) => setTimeout(resolve, 2000));
  for (let i = 0; i < labels.length; i++) {
    const l = labels[i];
    message = Buffer.from(`${STX}${scannerID}${l}${ETX}`, "ascii");
    console.log("Sending:", message.toString("ascii"));
    scanner.write(message);
    // pace the scans so the server can process each label before the next
    await new Promise((resolve) => setTimeout(resolve, 1200));
  }
  // give the server time to process the last label before dropping the socket
  await new Promise((resolve) => setTimeout(resolve, 1500));
  scanner.destroy();
});
scanner.on("data", async (data) => {
  console.log(
    "Response:",
    data
      .toString("ascii")
      .replace(/\x00/g, "") // remove null bytes
      .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
      .trim(),
  );
});
scanner.on("close", () => {
  console.log("Connection closed");
});
scanner.on("error", (err) => {
  console.error("Scanner error:", err);
});

View File

@@ -0,0 +1,100 @@
import net from "net";
/**
 * Fake-scanner script: opens a raw TCP connection to the label scanner
 * endpoint and replays the cmd112 consume command, then for each pallet
 * label first sends the lot barcode and then the label, mimicking the real
 * scanning process of consuming pallets into a production lot.
 *
 * Framing (as used throughout this script): STX + scannerID + payload + ETX,
 * written as ASCII.
 */
const scannerID = "98@"; // prefix identifying which scanner the frames come from
const scannerCommand = "Alplaprodcmd112"; // to consume all the pallets
const lot = "AlplaPRODchg#11601"; // to consume to the lot make sure its showing in 2.0 to be able to consume to it
const labels = [
  "1000000000000000000000000000000004551860",
  "1000000000000000000000000000000004551640",
  "1000000000000000000000000000000004551840",
  "1000000000000000000000000000000004551610",
  "1000000000000000000000000000000004551720",
  "1000000000000000000000000000000004551680",
  "1000000000000000000000000000000004551740",
  "1000000000000000000000000000000004551660",
  "1000000000000000000000000000000004551570",
  "1000000000000000000000000000000004551480",
  "1000000000000000000000000000000004551510",
  "1000000000000000000000000000000004551460",
  "1000000000000000000000000000000004551600",
  "1000000000000000000000000000000004551340",
  "1000000000000000000000000000000004551580",
  "1000000000000000000000000000000004551330",
  "1000000000000000000000000000000004551290",
  "1000000000000000000000000000000004551180",
  "1000000000000000000000000000000004551260",
  "1000000000000000000000000000000004551150",
  "1000000000000000000000000000000004551390",
  "1000000000000000000000000000000004551440",
  "1000000000000000000000000000000004551360",
  "1000000000000000000000000000000004551400",
  "1000000000000000000000000000000004544780",
  "1000000000000000000000000000000004551230",
  "1000000000000000000000000000000004544770",
  "1000000000000000000000000000000004551200",
  "1000000000000000000000000000000004544850",
  "1000000000000000000000000000000004548370",
  "1000000000000000000000000000000004544840",
  "1000000000000000000000000000000004548470",
  "1000000000000000000000000000000004611380",
  "1000000000000000000000000000000004611470",
  "1000000000000000000000000000000004611440",
];
// ASCII control characters framing each message
const STX = "\x02";
const ETX = "\x03";
const scanner = new net.Socket();
scanner.connect(50001, "10.80.0.26", async () => {
  console.log("Connected to scanner");
  // switch the scanner into 112 (consume) mode first
  let message = Buffer.from(
    `${STX}${scannerID}${scannerCommand}${ETX}`,
    "ascii",
  );
  console.log("Sending:", message.toString("ascii"));
  scanner.write(message);
  await new Promise((resolve) => setTimeout(resolve, 2000));
  // consume all the pallets in the array
  // NOTE(review): two consecutive 2s waits (4s total) before the first label -
  // confirm the doubled delay is intentional and not a leftover.
  await new Promise((resolve) => setTimeout(resolve, 2000));
  for (let i = 0; i < labels.length; i++) {
    const l = labels[i];
    // trigger the lot, then send the pallet label that gets consumed to it
    let message = Buffer.from(`${STX}${scannerID}${lot}${ETX}`, "ascii");
    console.log("Sending:", message.toString("ascii"));
    scanner.write(message);
    message = Buffer.from(`${STX}${scannerID}${l}${ETX}`, "ascii");
    console.log("Sending:", message.toString("ascii"));
    scanner.write(message);
    // pace the scans so the server can process each pair before the next
    await new Promise((resolve) => setTimeout(resolve, 1200));
  }
  // give the server time to process the last label before dropping the socket
  await new Promise((resolve) => setTimeout(resolve, 1500));
  scanner.destroy();
});
scanner.on("data", async (data) => {
  console.log(
    "Response:",
    data
      .toString("ascii")
      .replace(/\x00/g, "") // remove null bytes
      .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
      .trim(),
  );
});
scanner.on("close", () => {
  console.log("Connection closed");
});
scanner.on("error", (err) => {
  console.error("Scanner error:", err);
});

View File

@@ -0,0 +1,187 @@
import net from "net";
/**
 * Fake-scanner script: opens a raw TCP connection to the production label
 * scanner endpoint, sends a picksheet command, then replays a list of pallet
 * label barcodes, mimicking the real scanning process that creates the BOL.
 *
 * Framing (as used throughout this script): STX + scannerID + payload + ETX,
 * written as ASCII.
 */
const prodIP = "10.204.0.26"; // production scanner endpoint host
const prodPort = 50000; // production scanner endpoint port
const scannerID = "98@"; // prefix identifying which scanner the frames come from
const scannerCommand = "AlplaPRODcmd00000042#000028643"; // top of the picksheet
// NOTE(review): scannerCommand2 is empty and only referenced by the
// commented-out "close the incoming" section below - confirm before use.
const scannerCommand2 = ""; // bottom of the pick sheet
const labels = [
  "1000000000000000000000000000000005572620",
  "1000000000000000000000000000000005572630",
  "1000000000000000000000000000000005572640",
  "1000000000000000000000000000000005572650",
  "1000000000000000000000000000000005572660",
  "1000000000000000000000000000000005572670",
  "1000000000000000000000000000000005572680",
  "1000000000000000000000000000000005572690",
  "1000000000000000000000000000000005572700",
  "1000000000000000000000000000000005572710",
  "1000000000000000000000000000000005572720",
  "1000000000000000000000000000000005572730",
  "1000000000000000000000000000000005572740",
  "1000000000000000000000000000000005572750",
  "1000000000000000000000000000000005572760",
  "1000000000000000000000000000000005572770",
  "1000000000000000000000000000000005572780",
  "1000000000000000000000000000000005572790",
  "1000000000000000000000000000000005572800",
  "1000000000000000000000000000000005572810",
  "1000000000000000000000000000000005572820",
  "1000000000000000000000000000000005572830",
  "1000000000000000000000000000000005572840",
  "1000000000000000000000000000000005572850",
  "1000000000000000000000000000000005572860",
  "1000000000000000000000000000000005572870",
  "1000000000000000000000000000000005572880",
  "1000000000000000000000000000000005572890",
  "1000000000000000000000000000000005572900",
  "1000000000000000000000000000000005572910",
  "1000000000000000000000000000000005573226",
  "1000000000000000000000000000000005573236",
  "1000000000000000000000000000000005573246",
  "1000000000000000000000000000000005573256",
  "1000000000000000000000000000000005573266",
  "1000000000000000000000000000000005573276",
  "1000000000000000000000000000000005573286",
  "1000000000000000000000000000000005573296",
  "1000000000000000000000000000000005573306",
  "1000000000000000000000000000000005573316",
  "1000000000000000000000000000000005573326",
  "1000000000000000000000000000000005573336",
  "1000000000000000000000000000000005573346",
  "1000000000000000000000000000000005573356",
  "1000000000000000000000000000000005573366",
  "1000000000000000000000000000000005573376",
  "1000000000000000000000000000000005573386",
  "1000000000000000000000000000000005573396",
  "1000000000000000000000000000000005573406",
  "1000000000000000000000000000000005573416",
  "1000000000000000000000000000000005573426",
  "1000000000000000000000000000000005573436",
  "1000000000000000000000000000000005573446",
  "1000000000000000000000000000000005573456",
  "1000000000000000000000000000000005573466",
  "1000000000000000000000000000000005573476",
  "1000000000000000000000000000000005573486",
  "1000000000000000000000000000000005573496",
  "1000000000000000000000000000000005573506",
  "1000000000000000000000000000000005573516",
  "1000000000000000000000000000000005581616",
  "1000000000000000000000000000000005581626",
  "1000000000000000000000000000000005581636",
  "1000000000000000000000000000000005581646",
  "1000000000000000000000000000000005581656",
  "1000000000000000000000000000000005581666",
  "1000000000000000000000000000000005581676",
  "1000000000000000000000000000000005581686",
  "1000000000000000000000000000000005581696",
  "1000000000000000000000000000000005581706",
  "1000000000000000000000000000000005581716",
  "1000000000000000000000000000000005581726",
  "1000000000000000000000000000000005581736",
  "1000000000000000000000000000000005581746",
  "1000000000000000000000000000000005581756",
  "1000000000000000000000000000000005581766",
  "1000000000000000000000000000000005581776",
  "1000000000000000000000000000000005581786",
  "1000000000000000000000000000000005581796",
  "1000000000000000000000000000000005581806",
  "1000000000000000000000000000000005581816",
  "1000000000000000000000000000000005581826",
  "1000000000000000000000000000000005581836",
  "1000000000000000000000000000000005581846",
  "1000000000000000000000000000000005581856",
  "1000000000000000000000000000000005582760",
  "1000000000000000000000000000000005581866",
  "1000000000000000000000000000000005581876",
  "1000000000000000000000000000000005581886",
  "1000000000000000000000000000000005581896",
  "1000000000000000000000000000000005581906",
  "1000000000000000000000000000000005581310",
  "1000000000000000000000000000000005581320",
  "1000000000000000000000000000000005581330",
  "1000000000000000000000000000000005581340",
  "1000000000000000000000000000000005581350",
  "1000000000000000000000000000000005581360",
  "1000000000000000000000000000000005581370",
  "1000000000000000000000000000000005581380",
  "1000000000000000000000000000000005581390",
  "1000000000000000000000000000000005581400",
  "1000000000000000000000000000000005581410",
  "1000000000000000000000000000000005581420",
  "1000000000000000000000000000000005581430",
  "1000000000000000000000000000000005581440",
  "1000000000000000000000000000000005581450",
  "1000000000000000000000000000000005581460",
  "1000000000000000000000000000000005581470",
  "1000000000000000000000000000000005581480",
  "1000000000000000000000000000000005581490",
  "1000000000000000000000000000000005581500",
  "1000000000000000000000000000000005581510",
  "1000000000000000000000000000000005581520",
  "1000000000000000000000000000000005581530",
  "1000000000000000000000000000000005581540",
  "1000000000000000000000000000000005581550",
  "1000000000000000000000000000000005581560",
  "1000000000000000000000000000000005581570",
  "1000000000000000000000000000000005581580",
  "1000000000000000000000000000000005581590",
  "1000000000000000000000000000000005581600",
];
// ASCII control characters framing each message
const STX = "\x02";
const ETX = "\x03";
const scanner = new net.Socket();
scanner.connect(prodPort, prodIP, async () => {
  console.log("Connected to scanner");
  // open the picksheet before replaying any labels
  const message = Buffer.from(
    `${STX}${scannerID}${scannerCommand}${ETX}`,
    "ascii",
  );
  console.log("Sending:", message.toString("ascii"));
  scanner.write(message);
  await new Promise((resolve) => setTimeout(resolve, 2000));
  for (let i = 0; i < labels.length; i++) {
    const l = labels[i];
    const message = Buffer.from(`${STX}${scannerID}${l}${ETX}`, "ascii");
    console.log("Sending:", message.toString("ascii"));
    scanner.write(message);
    // pace the scans so the server can process each label before the next
    await new Promise((resolve) => setTimeout(resolve, 1200));
  }
  // //close the incoming
  // await new Promise(resolve => setTimeout(resolve, 1500));
  // const message2 = Buffer.from(`${STX}${scannerID}${scannerCommand2}${ETX}`, "ascii");
  // console.log("Sending:", message2.toString("ascii"));
  // scanner.write(message2);
  // give the server time to process the last label before dropping the socket
  await new Promise((resolve) => setTimeout(resolve, 1500));
  scanner.destroy();
});
scanner.on("data", async (data) => {
  console.log(
    "Response:",
    data
      .toString("ascii")
      .replace(/\x00/g, "") // remove null bytes
      .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
      .trim(),
  );
});
scanner.on("close", () => {
  console.log("Connection closed");
});
scanner.on("error", (err) => {
  console.error("Scanner error:", err);
});