Compare commits
42 Commits
09f16f4e62
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| dcfa56bdb9 | |||
| ea92422bb1 | |||
| 2111a5fdc9 | |||
| 6edd20585f | |||
| a9759795c4 | |||
| 32f26a1725 | |||
| 60533beed5 | |||
| 24ced97b6d | |||
| dc1d342799 | |||
| 44d0cb63cf | |||
| ace73fa919 | |||
| 316af4233f | |||
| 36a805c652 | |||
| 460bc3d24a | |||
| ec201fcfb5 | |||
| 914ad46c43 | |||
| b96c546ed3 | |||
| 29b3be41a1 | |||
| 16edf58025 | |||
| 775627f215 | |||
| 4e70fae69b | |||
| 24dd109a21 | |||
| 38b57a00cc | |||
| f8070db95f | |||
| 10e9dc430c | |||
| 6b669ccd9c | |||
| d9a10d98a1 | |||
| e64dc7c013 | |||
| d63138d746 | |||
| 84a28f2d01 | |||
| 9be6614972 | |||
| 9d0db71f6a | |||
| 3cc55436f3 | |||
| 124fde07e0 | |||
| b15d0d7322 | |||
| 0680f332fb | |||
| 46bf310dce | |||
| 0dda6ae744 | |||
| 1b59cdd3a4 | |||
| 56934216f7 | |||
| e8a2ef8b85 | |||
| 6cbffa4ac5 |
2
.vscode/settings.json
vendored
2
.vscode/settings.json
vendored
@@ -49,9 +49,11 @@
|
||||
"go.formatTool": "goimports",
|
||||
"cSpell.words": [
|
||||
"acitve",
|
||||
"actaully",
|
||||
"alpla",
|
||||
"alplamart",
|
||||
"alplaprod",
|
||||
"autoconsume",
|
||||
"intiallally",
|
||||
"ppoo",
|
||||
"prodlabels",
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
meta {
|
||||
name: Error logging
|
||||
type: http
|
||||
seq: 4
|
||||
}
|
||||
|
||||
get {
|
||||
url: {{urlv2}}/api/notify/toomanyerrors
|
||||
body: none
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
22
LogisticsSupportTool_API_DOCS/LstV2/Warehouse/sscc.bru
Normal file
22
LogisticsSupportTool_API_DOCS/LstV2/Warehouse/sscc.bru
Normal file
@@ -0,0 +1,22 @@
|
||||
meta {
|
||||
name: sscc
|
||||
type: http
|
||||
seq: 4
|
||||
}
|
||||
|
||||
post {
|
||||
url: {{url}}/lst/old/api/logistics/getsscc
|
||||
body: json
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
body:json {
|
||||
{
|
||||
"runningNr": ""
|
||||
}
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
meta {
|
||||
name: PSI -planning data
|
||||
type: http
|
||||
seq: 2
|
||||
}
|
||||
|
||||
get {
|
||||
url: {{url}}/lst/old/api/datamart/psiplanningdata?avs=118,120&startDate=12/1/2025&endDate=12/31/2026
|
||||
body: none
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
params:query {
|
||||
avs: 118,120
|
||||
startDate: 12/1/2025
|
||||
endDate: 12/31/2026
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
@@ -5,11 +5,12 @@ meta {
|
||||
}
|
||||
|
||||
get {
|
||||
url:
|
||||
url: {{url}}/lst/api/logistics/getsscc
|
||||
body: none
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"version": "1",
|
||||
"name": "LogisticsSupportTool_API_DOCS",
|
||||
"name": "lstv2",
|
||||
"type": "collection",
|
||||
"ignore": [
|
||||
"node_modules",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
vars {
|
||||
url: https://uslim1prod.alpla.net
|
||||
url: http://localhost:5500
|
||||
session_cookie:
|
||||
urlv2: http://usbow1vms006:3000
|
||||
jwtV2:
|
||||
|
||||
24
LogisticsSupportTool_API_DOCS/logistics/bookout.bru
Normal file
24
LogisticsSupportTool_API_DOCS/logistics/bookout.bru
Normal file
@@ -0,0 +1,24 @@
|
||||
meta {
|
||||
name: bookout
|
||||
type: http
|
||||
seq: 2
|
||||
}
|
||||
|
||||
post {
|
||||
url: {{url}}/lst/old/api/logistics/bookout
|
||||
body: json
|
||||
auth: none
|
||||
}
|
||||
|
||||
body:json {
|
||||
{
|
||||
|
||||
"runningNr": "1865027",
|
||||
"reason": "packer printed premature"
|
||||
}
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
8
LogisticsSupportTool_API_DOCS/logistics/folder.bru
Normal file
8
LogisticsSupportTool_API_DOCS/logistics/folder.bru
Normal file
@@ -0,0 +1,8 @@
|
||||
meta {
|
||||
name: logistics
|
||||
seq: 7
|
||||
}
|
||||
|
||||
auth {
|
||||
mode: inherit
|
||||
}
|
||||
24
LogisticsSupportTool_API_DOCS/logistics/relocate.bru
Normal file
24
LogisticsSupportTool_API_DOCS/logistics/relocate.bru
Normal file
@@ -0,0 +1,24 @@
|
||||
meta {
|
||||
name: relocate
|
||||
type: http
|
||||
seq: 1
|
||||
}
|
||||
|
||||
post {
|
||||
url: {{url}}/lst/old/api/logistics/relocate
|
||||
body: json
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
body:json {
|
||||
{
|
||||
|
||||
"runningNr": "56121541",
|
||||
"laneID": "30006"
|
||||
}
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
24
LogisticsSupportTool_API_DOCS/logistics/removeAsWaste.bru
Normal file
24
LogisticsSupportTool_API_DOCS/logistics/removeAsWaste.bru
Normal file
@@ -0,0 +1,24 @@
|
||||
meta {
|
||||
name: removeAsWaste
|
||||
type: http
|
||||
seq: 3
|
||||
}
|
||||
|
||||
post {
|
||||
url: {{url}}/lst/old/api/logistics/removeasreusable
|
||||
body: json
|
||||
auth: none
|
||||
}
|
||||
|
||||
body:json {
|
||||
{
|
||||
|
||||
"runningNr": "1865018",
|
||||
"reason": "validating stockout"
|
||||
}
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
@@ -78,7 +78,7 @@ const main = async () => {
|
||||
|
||||
// connect to the prod sql
|
||||
console.log("Connecting to the sql server");
|
||||
await initializeProdPool();
|
||||
|
||||
|
||||
// express app
|
||||
const app = express();
|
||||
@@ -184,7 +184,7 @@ const main = async () => {
|
||||
// swaggerUi.serve,
|
||||
// swaggerUi.setup(openapiSpec, swaggerUiOptions),
|
||||
// );
|
||||
|
||||
initializeProdPool();
|
||||
setupSwagger(app, basePath)
|
||||
app.use(basePath + "/d", express.static(join(__dirname, "../lstDocs/build")));
|
||||
app.use(
|
||||
|
||||
@@ -58,6 +58,8 @@ router.get("/", async (req, res) => {
|
||||
memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
|
||||
used.rss / 1024 / 1024
|
||||
).toFixed(2)} MB`,
|
||||
eomFGPkgSheetVersion: 1, // this is the excel file version when we have a change to the macro we want to grab this
|
||||
masterMacroFile: 1, // this is the excel file version when we have a change to the macro we want to grab this
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -1,123 +1,125 @@
|
||||
import type { Address } from "nodemailer/lib/mailer/index.js";
|
||||
import type { Transporter } from "nodemailer";
|
||||
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
|
||||
import type Mail from "nodemailer/lib/mailer/index.js";
|
||||
import os from "os";
|
||||
import nodemailer from "nodemailer";
|
||||
import type Mail from "nodemailer/lib/mailer/index.js";
|
||||
import type { Address } from "nodemailer/lib/mailer/index.js";
|
||||
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
|
||||
import hbs from "nodemailer-express-handlebars";
|
||||
import os from "os";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
import { promisify } from "util";
|
||||
import hbs from "nodemailer-express-handlebars";
|
||||
import { createLogger } from "../../logger/logger.js";
|
||||
|
||||
interface HandlebarsMailOptions extends Mail.Options {
|
||||
template: string;
|
||||
context: Record<string, unknown>;
|
||||
template: string;
|
||||
context: Record<string, unknown>;
|
||||
}
|
||||
|
||||
interface EmailData {
|
||||
email: string;
|
||||
subject: string;
|
||||
template: string;
|
||||
context: Record<string, unknown>;
|
||||
email: string;
|
||||
subject: string;
|
||||
template: string;
|
||||
context: Record<string, unknown>;
|
||||
}
|
||||
|
||||
export const sendEmail = async (data: EmailData): Promise<any> => {
|
||||
const log = createLogger({ module: "pkg", subModule: "sendMail" });
|
||||
let transporter: Transporter;
|
||||
let fromEmail: string | Address;
|
||||
const log = createLogger({ module: "pkg", subModule: "sendMail" });
|
||||
let transporter: Transporter;
|
||||
let fromEmail: string | Address;
|
||||
|
||||
if (
|
||||
os.hostname().includes("OLP") &&
|
||||
process.env.EMAIL_USER &&
|
||||
process.env.EMAIL_PASSWORD
|
||||
) {
|
||||
transporter = nodemailer.createTransport({
|
||||
service: "gmail",
|
||||
auth: {
|
||||
user: process.env.EMAIL_USER,
|
||||
pass: process.env.EMAIL_PASSWORD,
|
||||
},
|
||||
//debug: true,
|
||||
});
|
||||
// if (
|
||||
// os.hostname().includes("OLP") &&
|
||||
// process.env.EMAIL_USER &&
|
||||
// process.env.EMAIL_PASSWORD
|
||||
// ) {
|
||||
// transporter = nodemailer.createTransport({
|
||||
// service: "gmail",
|
||||
// auth: {
|
||||
// user: process.env.EMAIL_USER,
|
||||
// pass: process.env.EMAIL_PASSWORD,
|
||||
// },
|
||||
// //debug: true,
|
||||
// });
|
||||
|
||||
// update the from email
|
||||
fromEmail = process.env.EMAIL_USER;
|
||||
} else {
|
||||
// convert to the correct plant token.
|
||||
// // update the from email
|
||||
// fromEmail = process.env.EMAIL_USER;
|
||||
// } else {
|
||||
// // convert to the correct plant token.
|
||||
|
||||
let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`;
|
||||
//let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`;
|
||||
|
||||
//const testServers = ["vms036", "VMS036"];
|
||||
//const testServers = ["vms036", "VMS036"];
|
||||
|
||||
if (os.hostname().includes("VMS036")) {
|
||||
host = "USMCD1-smtp.alpla.net";
|
||||
}
|
||||
// if (os.hostname().includes("VMS036")) {
|
||||
// host = "USMCD1-smtp.alpla.net";
|
||||
// }
|
||||
|
||||
// if (plantToken[0].value === "usiow2") {
|
||||
// host = "USIOW1-smtp.alpla.net";
|
||||
// }
|
||||
// if (plantToken[0].value === "usiow2") {
|
||||
// host = "USIOW1-smtp.alpla.net";
|
||||
// }
|
||||
|
||||
transporter = nodemailer.createTransport({
|
||||
host: host,
|
||||
port: 25,
|
||||
rejectUnauthorized: false,
|
||||
//secure: false,
|
||||
// auth: {
|
||||
// user: "alplaprod",
|
||||
// pass: "obelix",
|
||||
// },
|
||||
debug: true,
|
||||
} as SMTPTransport.Options);
|
||||
transporter = nodemailer.createTransport({
|
||||
host: "smtp.azurecomm.net",
|
||||
port: 587,
|
||||
//rejectUnauthorized: false,
|
||||
tls: {
|
||||
minVersion: "TLSv1.2",
|
||||
},
|
||||
auth: {
|
||||
user: "donotreply@mail.alpla.com",
|
||||
pass: process.env.SMTP_PASSWORD,
|
||||
},
|
||||
debug: true,
|
||||
} as SMTPTransport.Options);
|
||||
|
||||
// update the from email
|
||||
fromEmail = `noreply@alpla.com`;
|
||||
}
|
||||
// update the from email
|
||||
fromEmail = `DoNotReply@mail.alpla.com`;
|
||||
//}
|
||||
|
||||
// creating the handlbar options
|
||||
const viewPath = path.resolve(
|
||||
path.dirname(fileURLToPath(import.meta.url)),
|
||||
"./views/"
|
||||
);
|
||||
// creating the handlbar options
|
||||
const viewPath = path.resolve(
|
||||
path.dirname(fileURLToPath(import.meta.url)),
|
||||
"./views/",
|
||||
);
|
||||
|
||||
const handlebarOptions = {
|
||||
viewEngine: {
|
||||
extname: ".hbs",
|
||||
//layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory
|
||||
defaultLayout: "", // Specify the default layout
|
||||
partialsDir: viewPath,
|
||||
},
|
||||
viewPath: viewPath,
|
||||
extName: ".hbs", // File extension for Handlebars templates
|
||||
};
|
||||
const handlebarOptions = {
|
||||
viewEngine: {
|
||||
extname: ".hbs",
|
||||
//layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory
|
||||
defaultLayout: "", // Specify the default layout
|
||||
partialsDir: viewPath,
|
||||
},
|
||||
viewPath: viewPath,
|
||||
extName: ".hbs", // File extension for Handlebars templates
|
||||
};
|
||||
|
||||
transporter.use("compile", hbs(handlebarOptions));
|
||||
transporter.use("compile", hbs(handlebarOptions));
|
||||
|
||||
const mailOptions: HandlebarsMailOptions = {
|
||||
from: fromEmail,
|
||||
to: data.email,
|
||||
subject: data.subject,
|
||||
//text: "You will have a reset token here and only have 30min to click the link before it expires.",
|
||||
//html: emailTemplate("BlakesTest", "This is an example with css"),
|
||||
template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs')
|
||||
context: data.context,
|
||||
};
|
||||
const mailOptions: HandlebarsMailOptions = {
|
||||
from: fromEmail,
|
||||
to: data.email,
|
||||
subject: data.subject,
|
||||
//text: "You will have a reset token here and only have 30min to click the link before it expires.",
|
||||
//html: emailTemplate("BlakesTest", "This is an example with css"),
|
||||
template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs')
|
||||
context: data.context,
|
||||
};
|
||||
|
||||
// now verify and send the email
|
||||
const sendMailPromise = promisify(transporter.sendMail).bind(transporter);
|
||||
// now verify and send the email
|
||||
const sendMailPromise = promisify(transporter.sendMail).bind(transporter);
|
||||
|
||||
try {
|
||||
// Send email and await the result
|
||||
const info = await sendMailPromise(mailOptions);
|
||||
log.info(null, `Email was sent to: ${data.email}`);
|
||||
return { success: true, message: "Email sent.", data: info };
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
log.error(
|
||||
{ error: err },
|
||||
try {
|
||||
// Send email and await the result
|
||||
const info = await sendMailPromise(mailOptions);
|
||||
log.info(null, `Email was sent to: ${data.email}`);
|
||||
return { success: true, message: "Email sent.", data: info };
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
log.error(
|
||||
{ error: err },
|
||||
|
||||
`Error sending Email to : ${data.email}`
|
||||
);
|
||||
return { success: false, message: "Error sending email.", error: err };
|
||||
}
|
||||
`Error sending Email to : ${data.email}`,
|
||||
);
|
||||
return { success: false, message: "Error sending email.", error: err };
|
||||
}
|
||||
};
|
||||
|
||||
@@ -12,14 +12,15 @@ import { LstCard } from "../../../extendedUi/LstCard";
|
||||
export default function Relocate() {
|
||||
const [bookingIn, setBookingIn] = useState(false);
|
||||
const form = useForm({
|
||||
defaultValues: { runningNr: " ", lane: "" },
|
||||
defaultValues: { runningNr: " ", laneID: "" },
|
||||
onSubmit: async ({ value }) => {
|
||||
// Do something with form data
|
||||
setBookingIn(true);
|
||||
|
||||
try {
|
||||
const res = await axios.post("/lst/old/api/ocp/bookin", {
|
||||
const res = await axios.post("/lst/old/api/logistics/relocate", {
|
||||
runningNr: parseInt(value.runningNr),
|
||||
laneID: parseInt(value.laneID),
|
||||
});
|
||||
|
||||
if (res.data.success) {
|
||||
@@ -27,15 +28,15 @@ export default function Relocate() {
|
||||
form.reset();
|
||||
setBookingIn(false);
|
||||
} else {
|
||||
console.log(res.data.data.errors);
|
||||
toast.error(res.data.data.errors[0]?.message);
|
||||
form.reset();
|
||||
console.log(res.data.message);
|
||||
toast.error(res.data.message);
|
||||
//form.reset();
|
||||
setBookingIn(false);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
toast.error(
|
||||
"There was an error booking in pallet please validate you entered the correct info and try again.",
|
||||
"There was an error relocating the pallet please validate the data.",
|
||||
);
|
||||
setBookingIn(false);
|
||||
}
|
||||
@@ -58,7 +59,7 @@ export default function Relocate() {
|
||||
validators={{
|
||||
// We can choose between form-wide and field-specific validators
|
||||
onChange: ({ value }) =>
|
||||
value.length > 2
|
||||
value.length > 0
|
||||
? undefined
|
||||
: "Please enter a valid running number",
|
||||
}}
|
||||
@@ -83,19 +84,17 @@ export default function Relocate() {
|
||||
}}
|
||||
/>
|
||||
<form.Field
|
||||
name="lane"
|
||||
name="laneID"
|
||||
validators={{
|
||||
// We can choose between form-wide and field-specific validators
|
||||
onChange: ({ value }) =>
|
||||
value.length > 2
|
||||
? undefined
|
||||
: "Please enter a valid running number",
|
||||
value.length > 0 ? undefined : "Please enter a valid lane ID",
|
||||
}}
|
||||
children={(field) => {
|
||||
return (
|
||||
<div className="">
|
||||
<Label htmlFor="runningNr" className="mb-2">
|
||||
Enter lane
|
||||
<Label htmlFor="laneID" className="mb-2">
|
||||
Enter lane ID
|
||||
</Label>
|
||||
<Input
|
||||
name={field.name}
|
||||
|
||||
@@ -7,14 +7,18 @@ export default function HelperPage() {
|
||||
return (
|
||||
<div className="flex flex-wrap m-2 justify-center">
|
||||
<div className="m-1">
|
||||
<Bookin />
|
||||
<div className="m-1 ">
|
||||
<Bookin />
|
||||
</div>
|
||||
<div className="w-96 m-1">
|
||||
<Relocate />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="m-1">
|
||||
{url === "localhost" && (
|
||||
<div className="m-1">
|
||||
<RemoveAsNonReusable />
|
||||
<Relocate />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -30,6 +30,7 @@ import { useSettingStore } from "../../../-lib/store/useSettings";
|
||||
const printReason = [
|
||||
{ key: "printerIssue", label: "Printer Related" },
|
||||
{ key: "missingRfidTag", label: "Missing or incorrect tag" },
|
||||
{ key: "multipleTags", label: "More than one tag on pallet." },
|
||||
{ key: "rfidMissScan", label: "Missed Scan from RFID reader" },
|
||||
{ key: "strapper", label: "Strapper Error" },
|
||||
{ key: "manualCheck", label: "20th pallet check" },
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
import { text, pgTable, timestamp, uuid, jsonb } from "drizzle-orm/pg-core";
|
||||
import { jsonb, pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";
|
||||
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
|
||||
import { z } from "zod";
|
||||
|
||||
export const commandLog = pgTable(
|
||||
"commandLog",
|
||||
{
|
||||
commandLog_id: uuid("commandLog_id").defaultRandom().primaryKey(),
|
||||
commandUsed: text("commandUsed").notNull(),
|
||||
bodySent: jsonb("bodySent").default([]),
|
||||
reasonUsed: text("reasonUsed"),
|
||||
add_at: timestamp("add_Date").defaultNow(),
|
||||
},
|
||||
(table) => [
|
||||
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
|
||||
// uniqueIndex("role_name").on(table.name),
|
||||
]
|
||||
"commandLog",
|
||||
{
|
||||
commandLog_id: uuid("commandLog_id").defaultRandom().primaryKey(),
|
||||
commandUsed: text("commandUsed").notNull(),
|
||||
bodySent: jsonb("bodySent").default([]),
|
||||
reasonUsed: text("reasonUsed"),
|
||||
addDate: timestamp("add_Date").defaultNow(),
|
||||
},
|
||||
(table) => [
|
||||
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
|
||||
// uniqueIndex("role_name").on(table.name),
|
||||
],
|
||||
);
|
||||
|
||||
// Schema for inserting a user - can be used to validate API requests
|
||||
|
||||
@@ -10,7 +10,8 @@
|
||||
"dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts",
|
||||
"dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts",
|
||||
"build": "npm run build:server",
|
||||
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y ",
|
||||
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y && npm run build:copySql",
|
||||
"build:copySql": "xcopy server\\services\\sqlServer\\querys\\newQueries dist\\server\\services\\sqlServer\\querys\\newQueries\\ /E /I /Y ",
|
||||
"build:frontend": "cd frontend && npm run build",
|
||||
"build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild",
|
||||
"copy:scripts": "tsx server/scripts/copyScripts.ts",
|
||||
|
||||
191
lstV2/server/globalUtils/scannerConnect.ts
Normal file
191
lstV2/server/globalUtils/scannerConnect.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
/**
|
||||
* Using this to make a scanner connection to the server.
|
||||
*/
|
||||
|
||||
import net from "net";
|
||||
|
||||
interface QueuedCommand {
|
||||
command: string;
|
||||
resolve: (value: string) => void;
|
||||
reject: (reason?: any) => void;
|
||||
timeout: NodeJS.Timeout;
|
||||
}
|
||||
|
||||
const STX = "\x02";
|
||||
const ETX = "\x03";
|
||||
|
||||
// const prodIP = process.env.SERVER_IP as string;
|
||||
// const prodPort = parseInt(process.env.SCANNER_PORT || "50000", 10);
|
||||
// const scannerID = `${process.env.SCANNER_ID}@`;
|
||||
//const scannerCommand = "AlplaPRODcmd00000042#000028547"; // top of the picksheet
|
||||
|
||||
export class ScannerClient {
|
||||
private socket = new net.Socket();
|
||||
private connected = false;
|
||||
|
||||
private queue: QueuedCommand[] = [];
|
||||
private processing = false;
|
||||
|
||||
private incomingBuffer = "";
|
||||
|
||||
constructor(
|
||||
private host: string,
|
||||
private port: number,
|
||||
private scannerId: string,
|
||||
) {
|
||||
this.initialize();
|
||||
}
|
||||
|
||||
private initialize() {
|
||||
if (!this.host || !this.port) {
|
||||
console.log("Host or port is missing");
|
||||
return;
|
||||
}
|
||||
this.socket.connect(this.port, this.host, () => {
|
||||
console.info("Connected to scanner");
|
||||
this.connected = true;
|
||||
});
|
||||
|
||||
this.socket.on("data", (data) => this.handleData(data));
|
||||
|
||||
this.socket.on("close", () => {
|
||||
console.log("Scanner connection closed");
|
||||
this.connected = false;
|
||||
});
|
||||
|
||||
this.socket.on("error", (err) => {
|
||||
console.error("Scanner error:", err);
|
||||
});
|
||||
}
|
||||
|
||||
// ✅ Public method you use
|
||||
public scan(command: string): Promise<string> {
|
||||
if (!this.connected) {
|
||||
return Promise.reject("Scanner not connected");
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const timeout = setTimeout(() => {
|
||||
this.processing = false;
|
||||
reject("Scanner timeout");
|
||||
this.processQueue();
|
||||
}, 5000); // 5s safety timeout
|
||||
|
||||
this.queue.push({
|
||||
command,
|
||||
resolve,
|
||||
reject,
|
||||
timeout,
|
||||
});
|
||||
|
||||
this.processQueue();
|
||||
});
|
||||
}
|
||||
|
||||
// ✅ Ensures strict FIFO processing
|
||||
private processQueue() {
|
||||
if (this.processing) return;
|
||||
if (this.queue.length === 0) return;
|
||||
|
||||
this.processing = true;
|
||||
|
||||
const current = this.queue[0];
|
||||
const message = Buffer.from(
|
||||
`${STX}${this.scannerId}${current.command}${ETX}`,
|
||||
"ascii",
|
||||
);
|
||||
|
||||
this.socket.write(message);
|
||||
}
|
||||
|
||||
// ✅ Handles full STX/ETX framed responses
|
||||
private handleData(data: Buffer) {
|
||||
console.log(
|
||||
"ASCII:",
|
||||
data
|
||||
.toString("ascii")
|
||||
.replace(/\x00/g, "") // remove null bytes
|
||||
.replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
|
||||
.trim(),
|
||||
);
|
||||
|
||||
const current = this.queue.shift();
|
||||
if (current) {
|
||||
clearTimeout(current.timeout);
|
||||
current.resolve(data.toString("ascii"));
|
||||
}
|
||||
|
||||
this.processing = false;
|
||||
this.processQueue();
|
||||
}
|
||||
}
|
||||
|
||||
export const scanner = new ScannerClient(
|
||||
process.env.SERVER_IP!,
|
||||
parseInt(process.env.SCANNER_PORT!, 10),
|
||||
`${process.env.SCANNER_ID}@`,
|
||||
);
|
||||
|
||||
// export const connectToScanner = () => {
|
||||
// if (!process.env.SERVER_IP || !process.env.SCANNER_PORT) {
|
||||
// return {
|
||||
// success: false,
|
||||
// message: "Missing ServerIP or ServerPort",
|
||||
// };
|
||||
// }
|
||||
|
||||
// scanner.connect(prodPort, prodIP, () => {
|
||||
// console.log("Connected to scanner");
|
||||
// connected = true;
|
||||
// });
|
||||
// };
|
||||
|
||||
// export const scan = async (command: string) => {
|
||||
// if (!connected) {
|
||||
// return {
|
||||
// success: false,
|
||||
// message: "Scanner is not connected, please contact admin",
|
||||
// };
|
||||
// }
|
||||
// if (inScanCommand) {
|
||||
// bufferCommands.push({ timeStamp: new Date(Date.now()), command: command });
|
||||
// }
|
||||
|
||||
// // we are going to set to scanning
|
||||
// inScanCommand = true;
|
||||
|
||||
// const message = Buffer.from(`${STX}${scannerID}${command}${ETX}`, "ascii");
|
||||
// scanner.write(message);
|
||||
// await new Promise((resolve) => setTimeout(resolve, 750));
|
||||
|
||||
// inScanCommand = false;
|
||||
|
||||
// if (bufferCommands.length > 0) {
|
||||
// await scan(bufferCommands[0].command);
|
||||
// bufferCommands.shift();
|
||||
// }
|
||||
|
||||
// return {
|
||||
// success: true,
|
||||
// message: "Scan completed",
|
||||
// };
|
||||
// };
|
||||
|
||||
// scanner.on("data", async (data) => {
|
||||
// console.log(
|
||||
// "Response:",
|
||||
// data
|
||||
// .toString("ascii")
|
||||
// .replace(/\x00/g, "") // remove null bytes
|
||||
// .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
|
||||
// .trim(),
|
||||
// );
|
||||
// });
|
||||
|
||||
// scanner.on("close", () => {
|
||||
// console.log("Connection closed");
|
||||
// });
|
||||
|
||||
// scanner.on("error", (err) => {
|
||||
// console.error("Scanner error:", err);
|
||||
// });
|
||||
@@ -1,84 +1,95 @@
|
||||
import { addDays, format } from "date-fns";
|
||||
import { formatInTimeZone } from "date-fns-tz";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { db } from "../../../../database/dbclient.js";
|
||||
import { settings } from "../../../../database/schema/settings.js";
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
import { query } from "../../sqlServer/prodSqlServer.js";
|
||||
import { deliveryByDateRange } from "../../sqlServer/querys/dataMart/deleveryByDateRange.js";
|
||||
import { addDays, format } from "date-fns";
|
||||
|
||||
export const getDeliveryByDateRange = async (data: any | null) => {
|
||||
// const { data: plantToken, error: plantError } = await tryCatch(
|
||||
// db.select().from(settings).where(eq(settings.name, "plantToken"))
|
||||
// );
|
||||
// if (plantError) {
|
||||
// return {
|
||||
// success: false,
|
||||
// message: "Error getting Settings",
|
||||
// data: plantError,
|
||||
// };
|
||||
// }
|
||||
let deliverys: any = [];
|
||||
// const { data: plantToken, error: plantError } = await tryCatch(
|
||||
// db.select().from(settings).where(eq(settings.name, "plantToken"))
|
||||
// );
|
||||
// if (plantError) {
|
||||
// return {
|
||||
// success: false,
|
||||
// message: "Error getting Settings",
|
||||
// data: plantError,
|
||||
// };
|
||||
// }
|
||||
let deliverys: any = [];
|
||||
|
||||
let updatedQuery = deliveryByDateRange;
|
||||
let updatedQuery = deliveryByDateRange;
|
||||
|
||||
// start days can be sent over
|
||||
if (data?.start) {
|
||||
updatedQuery = updatedQuery.replaceAll("[startDate]", data.start[0]);
|
||||
} else {
|
||||
updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1");
|
||||
}
|
||||
// start days can be sent over
|
||||
if (data?.start) {
|
||||
updatedQuery = updatedQuery.replaceAll("[startDate]", data.start[0]);
|
||||
} else {
|
||||
updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1");
|
||||
}
|
||||
|
||||
// end days can be sent over
|
||||
if (data?.end) {
|
||||
updatedQuery = updatedQuery.replaceAll("[endDate]", data.end[0]);
|
||||
} else {
|
||||
const defaultEndDate = format(
|
||||
addDays(new Date(Date.now()), 5),
|
||||
"yyyy-M-d"
|
||||
);
|
||||
updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
|
||||
}
|
||||
// end days can be sent over
|
||||
if (data?.end) {
|
||||
updatedQuery = updatedQuery.replaceAll("[endDate]", data.end[0]);
|
||||
} else {
|
||||
const defaultEndDate = format(addDays(new Date(Date.now()), 5), "yyyy-M-d");
|
||||
updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
|
||||
}
|
||||
|
||||
try {
|
||||
const res: any = await query(
|
||||
updatedQuery,
|
||||
"Get Delivery by date range"
|
||||
);
|
||||
deliverys = res.data;
|
||||
//console.log(res.data);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return {
|
||||
success: false,
|
||||
message: "All Deliveries within the range.",
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
try {
|
||||
const res: any = await query(updatedQuery, "Get Delivery by date range");
|
||||
deliverys = res.data;
|
||||
//console.log(res.data);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return {
|
||||
success: false,
|
||||
message: "All Deliveries within the range.",
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
|
||||
if (!data) {
|
||||
deliverys = deliverys.splice(1000, 0);
|
||||
}
|
||||
// add plant token in
|
||||
// const pOrders = deliverys.map((item: any) => {
|
||||
// // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", {
|
||||
// // month: "numeric",
|
||||
// // day: "numeric",
|
||||
// // year: "numeric",
|
||||
// // hour: "2-digit",
|
||||
// // minute: "2-digit",
|
||||
// // hour12: false,
|
||||
// // });
|
||||
// if (!data) {
|
||||
// deliverys = deliverys.splice(1000, 0);
|
||||
// }
|
||||
// add plant token in
|
||||
// const pOrders = deliverys.map((item: any) => {
|
||||
// // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", {
|
||||
// // month: "numeric",
|
||||
// // day: "numeric",
|
||||
// // year: "numeric",
|
||||
// // hour: "2-digit",
|
||||
// // minute: "2-digit",
|
||||
// // hour12: false,
|
||||
// // });
|
||||
|
||||
// //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0];
|
||||
// const dateCon = new Date(item.loadingDate).toISOString().split("T")[0];
|
||||
// //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0];
|
||||
// const delDate = new Date(item.deliveryDate).toISOString().split("T")[0];
|
||||
// return {
|
||||
// plantToken: plantToken[0].value,
|
||||
// ...item,
|
||||
// loadingDate: dateCon,
|
||||
// deliveryDate: delDate,
|
||||
// };
|
||||
// });
|
||||
return { success: true, message: "Current open orders", data: deliverys };
|
||||
// //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0];
|
||||
// const dateCon = new Date(item.loadingDate).toISOString().split("T")[0];
|
||||
// //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0];
|
||||
// const delDate = new Date(item.deliveryDate).toISOString().split("T")[0];
|
||||
// return {
|
||||
// plantToken: plantToken[0].value,
|
||||
// ...item,
|
||||
// loadingDate: dateCon,
|
||||
// deliveryDate: delDate,
|
||||
// };
|
||||
// });
|
||||
return {
|
||||
success: true,
|
||||
message: "Current open orders",
|
||||
data: deliverys.map((i: any) => {
|
||||
const orderDate = new Date(i.OrderDate);
|
||||
const delDate = new Date(i.DeliveryDate);
|
||||
const loadDate = new Date(i.LoadingDate);
|
||||
|
||||
return {
|
||||
...i,
|
||||
OrderDate: format(orderDate, "yyyy-MM-dd HH:mm"),
|
||||
DeliveryDate: format(delDate, "yyyy-MM-dd HH:mm"),
|
||||
LoadingDate: format(loadDate, "yyyy-MM-dd HH:mm"),
|
||||
dbDate: i.DeliveryDate,
|
||||
};
|
||||
}),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,84 +1,97 @@
|
||||
import { addDays, format } from "date-fns";
|
||||
import { query } from "../../sqlServer/prodSqlServer.js";
|
||||
import { deliveryByDateRangeAndAv } from "../../sqlServer/querys/dataMart/deleveryByDateRange.js";
|
||||
import { addDays, format } from "date-fns";
|
||||
|
||||
export const getDeliveryByDateRangeAndAv = async (
|
||||
avs: string,
|
||||
startDate: string,
|
||||
endDate: string
|
||||
avs: string,
|
||||
startDate: string,
|
||||
endDate: string,
|
||||
) => {
|
||||
// const { data: plantToken, error: plantError } = await tryCatch(
|
||||
// db.select().from(settings).where(eq(settings.name, "plantToken"))
|
||||
// );
|
||||
// if (plantError) {
|
||||
// return {
|
||||
// success: false,
|
||||
// message: "Error getting Settings",
|
||||
// data: plantError,
|
||||
// };
|
||||
// }
|
||||
let deliverys: any = [];
|
||||
// const { data: plantToken, error: plantError } = await tryCatch(
|
||||
// db.select().from(settings).where(eq(settings.name, "plantToken"))
|
||||
// );
|
||||
// if (plantError) {
|
||||
// return {
|
||||
// success: false,
|
||||
// message: "Error getting Settings",
|
||||
// data: plantError,
|
||||
// };
|
||||
// }
|
||||
let deliverys: any = [];
|
||||
|
||||
let updatedQuery = deliveryByDateRangeAndAv;
|
||||
let updatedQuery = deliveryByDateRangeAndAv;
|
||||
|
||||
// start days can be sent over
|
||||
if (startDate) {
|
||||
updatedQuery = updatedQuery.replaceAll("[startDate]", startDate);
|
||||
} else {
|
||||
updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1");
|
||||
}
|
||||
// start days can be sent over
|
||||
if (startDate) {
|
||||
updatedQuery = updatedQuery.replaceAll("[startDate]", startDate);
|
||||
} else {
|
||||
updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1");
|
||||
}
|
||||
|
||||
// end days can be sent over
|
||||
if (endDate) {
|
||||
updatedQuery = updatedQuery.replaceAll("[endDate]", endDate);
|
||||
} else {
|
||||
const defaultEndDate = format(
|
||||
addDays(new Date(Date.now()), 5),
|
||||
"yyyy-M-d"
|
||||
);
|
||||
updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
|
||||
}
|
||||
// end days can be sent over
|
||||
if (endDate) {
|
||||
updatedQuery = updatedQuery.replaceAll("[endDate]", endDate);
|
||||
} else {
|
||||
const defaultEndDate = format(addDays(new Date(Date.now()), 5), "yyyy-M-d");
|
||||
updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
|
||||
}
|
||||
|
||||
try {
|
||||
const res: any = await query(
|
||||
updatedQuery.replace("[articles]", avs),
|
||||
"Get Delivery by date range"
|
||||
);
|
||||
deliverys = res.data;
|
||||
//console.log(res.data);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return {
|
||||
success: false,
|
||||
message: "All Deliveries within the range.",
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
try {
|
||||
const res: any = await query(
|
||||
updatedQuery.replace("[articles]", avs),
|
||||
"Get Delivery by date range",
|
||||
);
|
||||
deliverys = res.data;
|
||||
//console.log(res.data);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return {
|
||||
success: false,
|
||||
message: "All Deliveries within the range.",
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
|
||||
// if (!data) {
|
||||
// deliverys = deliverys.splice(1000, 0);
|
||||
// }
|
||||
// add plant token in
|
||||
// const pOrders = deliverys.map((item: any) => {
|
||||
// // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", {
|
||||
// // month: "numeric",
|
||||
// // day: "numeric",
|
||||
// // year: "numeric",
|
||||
// // hour: "2-digit",
|
||||
// // minute: "2-digit",
|
||||
// // hour12: false,
|
||||
// // });
|
||||
// if (!data) {
|
||||
// deliverys = deliverys.splice(1000, 0);
|
||||
// }
|
||||
// add plant token in
|
||||
// const pOrders = deliverys.map((item: any) => {
|
||||
// // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", {
|
||||
// // month: "numeric",
|
||||
// // day: "numeric",
|
||||
// // year: "numeric",
|
||||
// // hour: "2-digit",
|
||||
// // minute: "2-digit",
|
||||
// // hour12: false,
|
||||
// // });
|
||||
|
||||
// //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0];
|
||||
// const dateCon = new Date(item.loadingDate).toISOString().split("T")[0];
|
||||
// //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0];
|
||||
// const delDate = new Date(item.deliveryDate).toISOString().split("T")[0];
|
||||
// return {
|
||||
// plantToken: plantToken[0].value,
|
||||
// ...item,
|
||||
// loadingDate: dateCon,
|
||||
// deliveryDate: delDate,
|
||||
// };
|
||||
// });
|
||||
return { success: true, message: "Current open orders", data: deliverys };
|
||||
// //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0];
|
||||
// const dateCon = new Date(item.loadingDate).toISOString().split("T")[0];
|
||||
// //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0];
|
||||
// const delDate = new Date(item.deliveryDate).toISOString().split("T")[0];
|
||||
// return {
|
||||
// plantToken: plantToken[0].value,
|
||||
// ...item,
|
||||
// loadingDate: dateCon,
|
||||
// deliveryDate: delDate,
|
||||
// };
|
||||
// });
|
||||
return {
|
||||
success: true,
|
||||
message: "Current open orders",
|
||||
data: deliverys.map((i: any) => {
|
||||
const orderDate = new Date(i.OrderDate);
|
||||
const delDate = new Date(i.DeliveryDate);
|
||||
const loadDate = new Date(i.LoadingDate);
|
||||
|
||||
return {
|
||||
...i,
|
||||
OrderDate: format(orderDate, "yyyy-MM-dd HH:mm"),
|
||||
DeliveryDate: format(delDate, "yyyy-MM-dd HH:mm"),
|
||||
LoadingDate: format(loadDate, "yyyy-MM-dd HH:mm"),
|
||||
dbDate: i.DeliveryDate,
|
||||
};
|
||||
}),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { format } from "date-fns-tz/format";
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../logger/logger.js";
|
||||
import { query } from "../../sqlServer/prodSqlServer.js";
|
||||
@@ -37,6 +38,15 @@ export const getGetPSIForecastData = async (customer: string) => {
|
||||
return {
|
||||
success: true,
|
||||
message: "PSI forecast Data",
|
||||
data: articles,
|
||||
data: articles.map((i: any) => {
|
||||
const requirementDate = new Date(i.requirementDate);
|
||||
|
||||
return {
|
||||
...i,
|
||||
requirementDate: format(requirementDate, "yyyy-MM-dd"),
|
||||
|
||||
dbDate: i.requirementDate,
|
||||
};
|
||||
}),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -2,62 +2,72 @@ import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../logger/logger.js";
|
||||
import { query } from "../../sqlServer/prodSqlServer.js";
|
||||
import { planningNumbersByAVDate } from "../../sqlServer/querys/psiReport/planningNumbersByAv.js";
|
||||
import { improvedPsiPlanningInfo } from "./psiPlanningDataImproved.js";
|
||||
|
||||
// type ArticleData = {
|
||||
// id: string
|
||||
// }
|
||||
export const psiGetPlanningData = async (
|
||||
avs: string,
|
||||
startDate: string,
|
||||
endDate: string
|
||||
avs: string,
|
||||
startDate: string,
|
||||
endDate: string,
|
||||
) => {
|
||||
let articles: any = [];
|
||||
let articles: any = [];
|
||||
|
||||
if (!avs) {
|
||||
return {
|
||||
success: false,
|
||||
message: `Missing av's please send at least one over`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
if (!avs) {
|
||||
return {
|
||||
success: false,
|
||||
message: `Missing av's please send at least one over`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
const { data, error } = (await tryCatch(
|
||||
query(
|
||||
planningNumbersByAVDate
|
||||
.replace("[articles]", avs)
|
||||
.replace("[startDate]", startDate)
|
||||
.replace("[endDate]", endDate),
|
||||
"PSI planning info"
|
||||
)
|
||||
)) as any;
|
||||
const { data, error } = (await tryCatch(
|
||||
query(
|
||||
planningNumbersByAVDate
|
||||
.replace("[articles]", avs)
|
||||
.replace("[startDate]", startDate)
|
||||
.replace("[endDate]", endDate),
|
||||
"PSI planning info",
|
||||
),
|
||||
)) as any;
|
||||
|
||||
if (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"datamart",
|
||||
"datamart",
|
||||
`There was an error getting the planning info: ${JSON.stringify(
|
||||
error
|
||||
)}`
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
messsage: `There was an error getting the planning info`,
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
// improvedPsiPlanningInfo({
|
||||
// avs,
|
||||
// startDate,
|
||||
// endDate,
|
||||
// });
|
||||
if (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"datamart",
|
||||
"datamart",
|
||||
`There was an error getting the planning info: ${JSON.stringify(error)}`,
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
messsage: `There was an error getting the planning info`,
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
|
||||
articles = data.data;
|
||||
// TODO: if we are not running planning we no pass the old structure if we are running new planning use the below improved version that makes sure we dont have negative numebrs.
|
||||
articles = data.data;
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "PSI planning Data",
|
||||
data: articles.map((n: any) => {
|
||||
if (n.PalDay) {
|
||||
return { ...n, PalDay: n.PalDay.toFixed(2) };
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
message: "PSI planning Data",
|
||||
data: await improvedPsiPlanningInfo({
|
||||
avs,
|
||||
startDate,
|
||||
endDate,
|
||||
}),
|
||||
// data: articles.map((n: any) => {
|
||||
// if (n.PalDay) {
|
||||
// return { ...n, PalDay: n.PalDay.toFixed(2) };
|
||||
// }
|
||||
|
||||
return n;
|
||||
}),
|
||||
};
|
||||
// return n;
|
||||
// }),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -0,0 +1,171 @@
|
||||
import { format } from "date-fns-tz";
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
import { query } from "../../sqlServer/prodSqlServer.js";
|
||||
|
||||
const improvedQuery = `
|
||||
|
||||
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
|
||||
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
|
||||
|
||||
SELECT
|
||||
[RunningNumber] as lot
|
||||
,[ProfitCentreDescription]
|
||||
,[MachineDescription]
|
||||
,[ArticleHumanReadableId]
|
||||
,[ArticleDescription]
|
||||
,[DeliveryAddressHumanReadableId]
|
||||
,[DeliveryAddressDescription]
|
||||
,[MouldHumanReadableId]
|
||||
,[BlowheadHumanReadableId1]
|
||||
,[PackagingInstructionHumanReadableId]
|
||||
,[PackagingInstructionDescription]
|
||||
,[MainMaterialHumanReadableId]
|
||||
,[MainMaterialDescription]
|
||||
,[CompoundHumanReadableId]
|
||||
,[CompoundDescription]
|
||||
,[ProductionLotState]
|
||||
,[PlanType]
|
||||
,[ProducedQuantityLoadingUnit]
|
||||
,[ProducedQuantityPieces]
|
||||
,[PlanStart]
|
||||
,[PlanEnd]
|
||||
,[ProdStart]
|
||||
,[TheoreticEnd]
|
||||
,[ProdDuration]
|
||||
,[SetupDuration]
|
||||
,[StartupDuration]
|
||||
|
||||
,[NetEquipmentEfficiency]
|
||||
,[UtilisationDuration]
|
||||
,[CycleTime]
|
||||
,[Cavities]
|
||||
,[FixedQuantity]
|
||||
,[ProducedQuantityTrucks]
|
||||
,[ProducedQuantityTradeUnit]
|
||||
,[MaxRegrind]
|
||||
,[Conflict]
|
||||
,[ProductionOrderHumanReadableId]
|
||||
,[ProductionDataImportSource]
|
||||
,[Remark]
|
||||
,[BlowheadDescription1]
|
||||
,[MouldDescription]
|
||||
,[ProcessLossPercentage]
|
||||
,[SetupTypeNumberOfPersons]
|
||||
,[UnplannedDowntimePercentage]
|
||||
,[PlanQuantityLoadingUnit]
|
||||
,[PlanQuantityPieces]
|
||||
,[PlanQuantityTradeUnit]
|
||||
,[PlanQuantityTrucks]
|
||||
,[PublishState]
|
||||
,[LastChange]
|
||||
,[MaterialConsumed]
|
||||
,[MaterialStaged]
|
||||
,[MachineLocation]
|
||||
,[HasPrioritization]
|
||||
,[ArticleAlias]
|
||||
|
||||
FROM [test1_AlplaPROD2.0_Read].[productionScheduling].[ProductionLot] with (nolock)
|
||||
where PlanEnd between @StartDate and @EndDate
|
||||
and ArticleHumanReadableId in ([articles])
|
||||
and PublishState = 1
|
||||
order by PlanStart
|
||||
|
||||
`;
|
||||
export const improvedPsiPlanningInfo = async (something: any) => {
|
||||
const { data, error } = (await tryCatch(
|
||||
query(
|
||||
improvedQuery
|
||||
.replace("[articles]", something.avs)
|
||||
.replace("[startDate]", something.startDate)
|
||||
.replace("[endDate]", something.endDate),
|
||||
"PSI planning info",
|
||||
),
|
||||
)) as any;
|
||||
|
||||
// add error handling in later here
|
||||
|
||||
return splitProduction(data.data);
|
||||
};
|
||||
|
||||
const splitProduction = (runs: any) => {
|
||||
const results: any = [];
|
||||
const WORKDAY_START_HOUR = 7; // 07:00 start well later get this from the shift def
|
||||
|
||||
runs.forEach((e: any) => {
|
||||
const {
|
||||
PlanStart,
|
||||
PlanEnd,
|
||||
PlanQuantityPieces,
|
||||
ArticleHumanReadableId,
|
||||
ProdDuration,
|
||||
} = e;
|
||||
|
||||
const prodStart: any = new Date(PlanStart);
|
||||
const prodEnd: any = new Date(PlanEnd);
|
||||
const prodDuration = ProdDuration
|
||||
? ProdDuration * 60 * 60 * 1000
|
||||
: prodEnd - prodStart;
|
||||
|
||||
// get the prod date the production falls under
|
||||
function getProdDayStart(date: Date) {
|
||||
const d = new Date(date);
|
||||
d.setHours(WORKDAY_START_HOUR, 0, 0, 0);
|
||||
|
||||
if (date.getHours() < WORKDAY_START_HOUR) {
|
||||
// before 07:00, belongs to previous calendar day
|
||||
d.setDate(d.getDate() - 1);
|
||||
}
|
||||
return d;
|
||||
}
|
||||
|
||||
// current pointer starts at the work-day start that contains our start time
|
||||
let currentStart = new Date(prodStart);
|
||||
let prodDayStart = getProdDayStart(currentStart);
|
||||
|
||||
while (prodDayStart < prodEnd) {
|
||||
// 1️⃣ The next day’s start = prodDayStart + 1 day at 07:00
|
||||
const nextProdDayStart = new Date(prodDayStart);
|
||||
nextProdDayStart.setDate(nextProdDayStart.getDate() + 1);
|
||||
|
||||
// 2️⃣ Segment end is either the next work-day start or the actual end, whichever is sooner
|
||||
const segmentEnd = new Date(
|
||||
Math.min(nextProdDayStart.getTime(), prodEnd.getTime()),
|
||||
);
|
||||
|
||||
// 3️⃣ Determine overlap window within (startTime..endTime)
|
||||
const segStart: any = new Date(
|
||||
Math.max(prodDayStart.getTime(), prodStart.getTime()),
|
||||
);
|
||||
const segEnd: any = segmentEnd;
|
||||
|
||||
if (segEnd > segStart) {
|
||||
const segMs = segEnd - segStart;
|
||||
const proportion = segMs / prodDuration;
|
||||
const qty = PlanQuantityPieces * proportion;
|
||||
const pal = e.PlanQuantityLoadingUnit * proportion;
|
||||
|
||||
results.push({
|
||||
Article: ArticleHumanReadableId,
|
||||
Description: e.ArticleAlias,
|
||||
MachineId: e.MachineLocation,
|
||||
MachineName: e.MachineDescription,
|
||||
LotNumber: e.lot,
|
||||
ProductionDay: format(prodDayStart, "M/d/yyyy"),
|
||||
TotalPlanned: e.PlanQuantityPieces,
|
||||
// PlanEnd,
|
||||
// TheoreticEnd,
|
||||
QTYPerDay: parseInt(qty.toFixed(0)),
|
||||
PalDay: parseFloat(pal.toFixed(2)),
|
||||
finished: e.ProductionLotState === 3 ? 1 : 0,
|
||||
cavities: e.Cavities,
|
||||
//prodDuration,
|
||||
});
|
||||
}
|
||||
|
||||
// move to next production-day window
|
||||
prodDayStart = nextProdDayStart;
|
||||
}
|
||||
});
|
||||
|
||||
return results;
|
||||
};
|
||||
@@ -145,7 +145,7 @@ app.openapi(
|
||||
return c.json({
|
||||
success: true,
|
||||
message: "All Current Active Querys.",
|
||||
sheetVersion: 2.8,
|
||||
sheetVersion: 2.8, // TODO: when this gets switched change this
|
||||
data: current,
|
||||
});
|
||||
},
|
||||
|
||||
@@ -63,10 +63,10 @@ setTimeout(async () => {
|
||||
// the time we want to run the hostircal data should be the same time the historical data run on the server
|
||||
// getting this from the shift time
|
||||
|
||||
if (process.env.NODE_ENV?.trim() !== "production") {
|
||||
setTimeout(() => {
|
||||
historicalInvIMmport();
|
||||
}, 15 * 1000);
|
||||
}
|
||||
//if (process.env.NODE_ENV?.trim() !== "production") {
|
||||
setTimeout(() => {
|
||||
historicalInvIMmport();
|
||||
}, 15 * 1000);
|
||||
//}
|
||||
|
||||
export default app;
|
||||
|
||||
155
lstV2/server/services/logistics/controller/commands/bookout.ts
Normal file
155
lstV2/server/services/logistics/controller/commands/bookout.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
import axios from "axios";
|
||||
import net from "net";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { commandLog } from "../../../../../database/schema/commandLog.js";
|
||||
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
|
||||
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
|
||||
import { scanner } from "../../../../globalUtils/scannerConnect.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js";
|
||||
|
||||
type Data = {
|
||||
runningNr: number;
|
||||
reason: string;
|
||||
user: string;
|
||||
};
|
||||
export const bookOutPallet = async (data: Data) => {
|
||||
const { runningNr, reason, user } = data;
|
||||
|
||||
if (!reason || reason.length < 4) {
|
||||
return {
|
||||
success: false,
|
||||
status: 400,
|
||||
message: "The reason provided is to short",
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
const queryCheck = sqlQuerySelector("inventoryInfo.query");
|
||||
|
||||
if (!queryCheck.success) {
|
||||
return {
|
||||
success: false,
|
||||
status: 400,
|
||||
message: queryCheck.message,
|
||||
data: data,
|
||||
};
|
||||
}
|
||||
const { data: label, error: labelError } = (await tryCatch(
|
||||
query(
|
||||
queryCheck.query!.replace("[runningNr]", `${runningNr}`),
|
||||
"labelQuery",
|
||||
),
|
||||
)) as any;
|
||||
|
||||
if (labelError) {
|
||||
return {
|
||||
success: false,
|
||||
status: 400,
|
||||
message: labelError.message,
|
||||
data: labelError,
|
||||
};
|
||||
}
|
||||
|
||||
// check if we are in ppoo
|
||||
if (label.data.length <= 0) {
|
||||
return {
|
||||
success: false,
|
||||
status: 400,
|
||||
message: `${runningNr} is not currently in ppoo, please move to ppoo before trying to book-out`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
// check if the label is blocked for coa.
|
||||
if (
|
||||
label.data[0].blockingReason &&
|
||||
!label.data[0].blockingReason?.includes("COA")
|
||||
) {
|
||||
return {
|
||||
success: false,
|
||||
status: 400,
|
||||
message: `${runningNr} is not currently blocked for coa, to get this pallet booked out please take the label to quality to be released then you can book-out.`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
if (label.data[0].blockingReason) {
|
||||
await scanner.scan("AlplaPRODcmd89");
|
||||
await scanner.scan(`${label.data[0].barcode}`);
|
||||
}
|
||||
|
||||
// create the url to post
|
||||
const url = await prodEndpointCreation(
|
||||
"/public/v1.1/Manufacturing/ProductionControlling/BookOut",
|
||||
);
|
||||
const SSCC = await createSSCC(runningNr);
|
||||
|
||||
const bookOutData = {
|
||||
sscc: SSCC.slice(2),
|
||||
scannerId: "666",
|
||||
};
|
||||
|
||||
try {
|
||||
const results = await axios.post(url, bookOutData, {
|
||||
headers: {
|
||||
"X-API-Key": process.env.TEC_API_KEY || "",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
if (results.data.Errors) {
|
||||
return {
|
||||
success: false,
|
||||
status: 400,
|
||||
message: results.data.Errors.Error.Description,
|
||||
};
|
||||
}
|
||||
|
||||
// if (results.data.Result !== 0) {
|
||||
// console.log("stopping here and closing to soon", results);
|
||||
// return {
|
||||
// success: false,
|
||||
// status: 400,
|
||||
// message: results.data.Message,
|
||||
// };
|
||||
// }
|
||||
|
||||
const { data: commandL, error: ce } = await tryCatch(
|
||||
db.insert(commandLog).values({
|
||||
commandUsed: "book out",
|
||||
bodySent: data,
|
||||
reasonUsed: reason,
|
||||
}),
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `${runningNr} was booked out`,
|
||||
status: results.status,
|
||||
};
|
||||
} catch (error: any) {
|
||||
console.log(bookOutData);
|
||||
|
||||
return {
|
||||
success: false,
|
||||
status: 400,
|
||||
message: error.response?.data,
|
||||
data: error.response?.data,
|
||||
};
|
||||
}
|
||||
|
||||
// });
|
||||
|
||||
/**
|
||||
* book out the label with
|
||||
* url /public/v1.1/Manufacturing/ProductionControlling/BookOut
|
||||
* {
|
||||
* "sscc": "string",
|
||||
* "scannerId": "string"
|
||||
* }
|
||||
*/
|
||||
//---------------------------------------------------------------------------------------\\
|
||||
};
|
||||
@@ -0,0 +1,96 @@
|
||||
import axios from "axios";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { commandLog } from "../../../../../database/schema/commandLog.js";
|
||||
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
|
||||
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
|
||||
type Data = {
|
||||
runningNr: number;
|
||||
laneID: number;
|
||||
};
|
||||
export const relatePallet = async (data: Data) => {
|
||||
const { runningNr, laneID } = data;
|
||||
// replace the rn
|
||||
|
||||
// console.log(data);
|
||||
// create the url to post
|
||||
|
||||
// do we have warehousing turned on?
|
||||
const { data: feature, error: featureError } = (await tryCatch(
|
||||
query(
|
||||
`SELECT [Id]
|
||||
,[Feature]
|
||||
,[Enabled]
|
||||
,[ActivationDate]
|
||||
FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
|
||||
"feature switch check",
|
||||
),
|
||||
)) as any;
|
||||
|
||||
let prodUrl = "/public/v1.0/Warehousing/Relocate";
|
||||
if (featureError) {
|
||||
prodUrl = "/public/v1.0/Warehousing/Relocate";
|
||||
}
|
||||
|
||||
if (feature?.data.length > 0) {
|
||||
prodUrl = "/public/v1.1/Warehousing/Unit/Relocate";
|
||||
}
|
||||
// 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel","
|
||||
// 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"
|
||||
|
||||
let url = await prodEndpointCreation(prodUrl);
|
||||
|
||||
const SSCC = await createSSCC(runningNr);
|
||||
const consumeSomething = {
|
||||
ScannerId: 999,
|
||||
laneId: laneID,
|
||||
sscc: SSCC.slice(2),
|
||||
};
|
||||
|
||||
console.log(consumeSomething);
|
||||
try {
|
||||
const results = await axios.post(url, consumeSomething, {
|
||||
headers: {
|
||||
"X-API-Key": process.env.TEC_API_KEY || "",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
if (results.data.Errors) {
|
||||
return {
|
||||
success: false,
|
||||
message: results.data.Errors.Error.Description,
|
||||
};
|
||||
}
|
||||
|
||||
if (results.data.Result !== 0 || results.data.data.length <= 0) {
|
||||
return {
|
||||
success: false,
|
||||
message: results.data.Message,
|
||||
};
|
||||
}
|
||||
|
||||
const { data: commandL, error: ce } = await tryCatch(
|
||||
db.insert(commandLog).values({
|
||||
commandUsed: "relocate",
|
||||
bodySent: data,
|
||||
}),
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Pallet Was Relocated",
|
||||
status: results.status,
|
||||
};
|
||||
} catch (error: any) {
|
||||
console.log(error);
|
||||
return {
|
||||
success: false,
|
||||
status: 200,
|
||||
message: error.response?.data.errors[0].message,
|
||||
};
|
||||
}
|
||||
};
|
||||
@@ -1,120 +1,50 @@
|
||||
import axios from "axios";
|
||||
import { commandLog } from "../../../../../database/schema/commandLog.js";
|
||||
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { lstAuth } from "../../../../index.js";
|
||||
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import net from "net";
|
||||
import { commandLog } from "../../../../../database/schema/commandLog.js";
|
||||
import { scanner } from "../../../../globalUtils/scannerConnect.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { labelInfo } from "../../../sqlServer/querys/warehouse/labelInfo.js";
|
||||
import { settings } from "../../../../../database/schema/settings.js";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { serverData } from "../../../../../database/schema/serverData.js";
|
||||
|
||||
export const removeAsNonReusable = async (data: any) => {
|
||||
// const removalUrl = await prodEndpointCreation(
|
||||
// "/public/v1.0/Warehousing/RemoveAsNonReusableMaterial"
|
||||
// );
|
||||
// get the label info
|
||||
const { data: label, error: labelError } = (await tryCatch(
|
||||
query(labelInfo.replaceAll("[runningNr]", data.runningNr), "Label Info"),
|
||||
)) as any;
|
||||
|
||||
// const sscc = await createSSCC(data.runningNr);
|
||||
if (label.data[0].stockStatus === "notOnStock") {
|
||||
return {
|
||||
success: false,
|
||||
message: `The label: ${data.runningNr} is not currently in stock`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
// const { data: remove, error } = await tryCatch(
|
||||
// axios.post(
|
||||
// removalUrl,
|
||||
// { scannerId: "500", sscc: sscc.slice(2) },
|
||||
// {
|
||||
// headers: { Authorization: `Basic ${lstAuth}` },
|
||||
// }
|
||||
// )
|
||||
// );
|
||||
if (label.data[0].blockingReason) {
|
||||
return {
|
||||
success: false,
|
||||
status: 400,
|
||||
message: `${data.runningNr} is currently blocked, to get this pallet removed please take the label to quality to be released then you can remove.`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
// use a scanner tcp connection to trigger this process
|
||||
const STX = "\x02";
|
||||
const ETX = "\x03";
|
||||
const scanner = new net.Socket();
|
||||
let stage = 0;
|
||||
// get the label info
|
||||
const { data: label, error: labelError } = (await tryCatch(
|
||||
query(labelInfo.replaceAll("[runningNr]", data.runningNr), "Label Info")
|
||||
)) as any;
|
||||
await scanner.scan("AlplaPRODcmd23");
|
||||
await scanner.scan(`${label.data[0].barcode}`);
|
||||
|
||||
if (label.data[0].stockStatus === "notOnStock") {
|
||||
return {
|
||||
success: false,
|
||||
message: `The label: ${data.runningNr} is not currently in stock`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
let reason = data.reason || "";
|
||||
delete data.reason;
|
||||
|
||||
// get the server ip based on the token.
|
||||
const setting = await db.select().from(settings);
|
||||
const { data: commandL, error: ce } = await tryCatch(
|
||||
db.insert(commandLog).values({
|
||||
commandUsed: "removeAsNonReusable",
|
||||
bodySent: data,
|
||||
reasonUsed: reason,
|
||||
}),
|
||||
);
|
||||
|
||||
const plantInfo = await db.select().from(serverData);
|
||||
const plantToken = setting.filter((n: any) => n.name === "plantToken");
|
||||
const scannerID = setting.filter((n: any) => n.name === "scannerID");
|
||||
const scannerPort = setting.filter((n: any) => n.name === "scannerPort");
|
||||
const plantData = plantInfo.filter(
|
||||
(p: any) => p.plantToken === plantToken[0].value
|
||||
);
|
||||
|
||||
scanner.connect(
|
||||
parseInt(scannerPort[0].value),
|
||||
plantData[0].idAddress!,
|
||||
async () => {
|
||||
// need to get the ip from the server data and scanner port
|
||||
//console.log(`connected to scanner`);
|
||||
scanner.write(`${STX}${scannerID[0].value}@AlplaPRODcmd23${ETX}`);
|
||||
}
|
||||
);
|
||||
scanner.on("data", (data) => {
|
||||
const response = data.toString();
|
||||
//console.log("Received:", response.trimStart());
|
||||
if (stage === 0) {
|
||||
stage = 1;
|
||||
scanner.write(
|
||||
`${STX}${scannerID[0].value}@${label.data[0].Barcode}${ETX}`
|
||||
);
|
||||
} else if (stage === 1) {
|
||||
scanner.end();
|
||||
}
|
||||
});
|
||||
scanner.on("close", () => {
|
||||
//console.log("Connection closed");
|
||||
scanner.destroy();
|
||||
});
|
||||
scanner.on("error", (err) => {
|
||||
//console.error("Scanner error:", err);
|
||||
scanner.destroy();
|
||||
return {
|
||||
success: false,
|
||||
message: `The label: ${data.runningNr} encountering an error while being removed, please try again`,
|
||||
data: [],
|
||||
};
|
||||
});
|
||||
|
||||
// if (error) {
|
||||
// //console.log(error);
|
||||
// return {
|
||||
// success: false,
|
||||
// message: `There was an error removing ${data.runningNr}`,
|
||||
// data: [],
|
||||
// };
|
||||
// }
|
||||
|
||||
let reason = data.reason || "";
|
||||
delete data.reason;
|
||||
|
||||
const { data: commandL, error: ce } = await tryCatch(
|
||||
db.insert(commandLog).values({
|
||||
commandUsed: "removeAsNonReusable",
|
||||
bodySent: data,
|
||||
reasonUsed: reason,
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `The label: ${data.runningNr}, was removed`,
|
||||
data: [],
|
||||
};
|
||||
return {
|
||||
success: true,
|
||||
message: `The label: ${data.runningNr}, was removed`,
|
||||
data: [],
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { addDays, addHours, isAfter, parse } from "date-fns";
|
||||
import { addDays, addHours, isAfter, parse, subDays } from "date-fns";
|
||||
import { format } from "date-fns-tz";
|
||||
import XLSX from "xlsx";
|
||||
import { db } from "../../../../../../../database/dbclient.js";
|
||||
@@ -94,6 +94,10 @@ export const abbottOrders = async (data: any, user: any) => {
|
||||
};
|
||||
const oOrders: any = openOrders;
|
||||
//console.log(orderData);
|
||||
|
||||
function trimAll(str: string) {
|
||||
return str.replace(/\s+/g, "");
|
||||
}
|
||||
let correctedOrders: any = orderData
|
||||
.filter(
|
||||
(o: any) =>
|
||||
@@ -103,9 +107,9 @@ export const abbottOrders = async (data: any, user: any) => {
|
||||
.map((o: any) => ({
|
||||
date: excelDateStuff(o.date, o.time),
|
||||
po:
|
||||
o.newton8oz.replace(/\s+/g, "") !== ""
|
||||
? o.newton8oz.replace(/\s+/g, "")
|
||||
: o.newton10oz.replace(/\s+/g, ""),
|
||||
trimAll(o.newton8oz) !== ""
|
||||
? trimAll(o.newton8oz)
|
||||
: o.newton10oz.replace(/[\s\u00A0]+/g, ""),
|
||||
customerArticlenumber:
|
||||
o.newton8oz != ""
|
||||
? a.filter((a: any) => a.av === 118)[0].CustomerArticleNumber
|
||||
@@ -116,24 +120,29 @@ export const abbottOrders = async (data: any, user: any) => {
|
||||
: a.filter((a: any) => a.av === 120)[0].totalTruckLoad,
|
||||
}));
|
||||
|
||||
//console.log(correctedOrders);
|
||||
// now we want to make sure we only correct orders that or after now
|
||||
correctedOrders = correctedOrders.filter((o: any) => {
|
||||
const parsedDate = parse(o.date, "M/d/yyyy, h:mm:ss a", new Date());
|
||||
return isAfter(o.date, new Date().toISOString());
|
||||
return isAfter(new Date(o.date), new Date().toISOString());
|
||||
});
|
||||
|
||||
//console.log(correctedOrders);
|
||||
// last map to remove orders that have already been started
|
||||
// correctedOrders = correctedOrders.filter((oo: any) =>
|
||||
// oOrders.some((o: any) => o.CustomerOrderNumber === oo.po)
|
||||
// );
|
||||
let postedOrders: any = [];
|
||||
const filterOrders: any = correctedOrders;
|
||||
|
||||
//console.log(filterOrders);
|
||||
|
||||
filterOrders.forEach((oo: any) => {
|
||||
const isMatch = openOrders.some(
|
||||
(o: any) => String(o.po).trim() === String(oo.po).trim(),
|
||||
);
|
||||
//console.log(isMatch, oo.po);
|
||||
if (!isMatch) {
|
||||
//console.log(`ok to update: ${oo.po}`);
|
||||
console.log(`ok to update: ${oo.po}`);
|
||||
|
||||
// oo = {
|
||||
// ...oo,
|
||||
@@ -141,7 +150,7 @@ export const abbottOrders = async (data: any, user: any) => {
|
||||
// };
|
||||
postedOrders.push(oo);
|
||||
} else {
|
||||
// console.log(`Not valid order to update: ${oo.po}`);
|
||||
//console.log(`Not valid order to update: ${oo.po}`);
|
||||
//console.log(oo)
|
||||
}
|
||||
});
|
||||
@@ -159,7 +168,7 @@ export const abbottOrders = async (data: any, user: any) => {
|
||||
deliveryAddressId: 8,
|
||||
customerArticleNo: o.customerArticlenumber,
|
||||
quantity: o.qty,
|
||||
deliveryDate: addHours(format(o.date, "M/d/yyyy HH:mm"), 1), //addHours(addDays(o.date, 1), 1), // adding this in so we can over come the constant 1 day behind thing as a work around
|
||||
deliveryDate: format(o.date, "M/d/yyyy HH:mm"), // addHours(format(o.date, "M/d/yyyy HH:mm"), 1), //addHours(addDays(o.date, 1), 1), // adding this in so we can over come the constant 1 day behind thing as a work around
|
||||
customerLineItemNo: 1, // this is how it is currently sent over from abbott
|
||||
customerReleaseNo: 1, // same as above
|
||||
},
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import axios from "axios";
|
||||
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
|
||||
export const postAdjustment = async (data: any) => {
|
||||
if (data.warehouseId === undefined) {
|
||||
@@ -35,9 +36,30 @@ export const postAdjustment = async (data: any) => {
|
||||
quantity: data.quantity,
|
||||
};
|
||||
|
||||
let url = await prodEndpointCreation(
|
||||
"/public/v1.0/Warehousing/AdjustSiloStockLevel",
|
||||
);
|
||||
// do we have warehousing turned on?
|
||||
const { data: feature, error: featureError } = (await tryCatch(
|
||||
query(
|
||||
`SELECT [Id]
|
||||
,[Feature]
|
||||
,[Enabled]
|
||||
,[ActivationDate]
|
||||
FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
|
||||
"feature switch check",
|
||||
),
|
||||
)) as any;
|
||||
|
||||
let prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
|
||||
if (featureError) {
|
||||
prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
|
||||
}
|
||||
|
||||
if (feature?.data.length > 0) {
|
||||
prodUrl = "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel";
|
||||
}
|
||||
// 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel","
|
||||
// 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"
|
||||
|
||||
let url = await prodEndpointCreation(prodUrl);
|
||||
|
||||
const { data: silo, error } = await tryCatch(
|
||||
axios.post(url, siloAdjustment, {
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import { OpenAPIHono } from "@hono/zod-openapi";
|
||||
|
||||
import { migrateAdjustments } from "./controller/siloAdjustments/migrateAdjustments.js";
|
||||
import { getLanesToCycleCount } from "./controller/warehouse/cycleCountChecks/cyclecountCheck.js";
|
||||
import attachSilo from "./route/attachSilo.js";
|
||||
import bookOutPallet from "./route/bookout.js";
|
||||
import comsumeMaterial from "./route/consumeMaterial.js";
|
||||
import detachSilo from "./route/detachSilo.js";
|
||||
import postBulkOrders from "./route/dm/bulkOrdersIn.js";
|
||||
@@ -16,6 +18,7 @@ import outbound from "./route/getOutbound.js";
|
||||
import getPPOO from "./route/getPPOO.js";
|
||||
import getConnectionType from "./route/getSiloConnectionData.js";
|
||||
import getSSCC from "./route/getSSCCNumber.js";
|
||||
import relocate from "./route/relocate.js";
|
||||
import removeAsNonReable from "./route/removeAsNonReusable.js";
|
||||
import returnMat from "./route/returnMaterial.js";
|
||||
import createSiloAdjustment from "./route/siloAdjustments/createSiloAdjustment.js";
|
||||
@@ -28,7 +31,7 @@ const app = new OpenAPIHono();
|
||||
const routes = [
|
||||
comsumeMaterial,
|
||||
returnMat,
|
||||
|
||||
relocate,
|
||||
// silo
|
||||
createSiloAdjustment,
|
||||
postComment,
|
||||
@@ -55,6 +58,7 @@ const routes = [
|
||||
// logisitcs
|
||||
removeAsNonReable,
|
||||
getSSCC,
|
||||
bookOutPallet,
|
||||
] as const;
|
||||
|
||||
// app.route("/server", modules);
|
||||
|
||||
87
lstV2/server/services/logistics/route/bookout.ts
Normal file
87
lstV2/server/services/logistics/route/bookout.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
|
||||
import { verify } from "hono/jwt";
|
||||
import { apiHit } from "../../../globalUtils/apiHits.js";
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
//import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
|
||||
import { bookOutPallet } from "../controller/commands/bookout.js";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const responseSchema = z.object({
|
||||
success: z.boolean().optional().openapi({ example: true }),
|
||||
message: z.string().optional().openapi({ example: "user access" }),
|
||||
});
|
||||
|
||||
app.openapi(
|
||||
createRoute({
|
||||
tags: ["logistics"],
|
||||
summary: "Consumes material based on its running number",
|
||||
method: "post",
|
||||
path: "/bookout",
|
||||
//middleware: authMiddleware,
|
||||
description:
|
||||
"Provided a running number and lot number you can consume material.",
|
||||
responses: {
|
||||
200: {
|
||||
content: { "application/json": { schema: responseSchema } },
|
||||
description: "stopped",
|
||||
},
|
||||
400: {
|
||||
content: { "application/json": { schema: responseSchema } },
|
||||
description: "Failed to stop",
|
||||
},
|
||||
401: {
|
||||
content: { "application/json": { schema: responseSchema } },
|
||||
description: "Failed to stop",
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const { data, error } = await tryCatch(c.req.json());
|
||||
|
||||
if (error) {
|
||||
return c.json(
|
||||
{
|
||||
success: false,
|
||||
message: "Missing data please try again",
|
||||
error,
|
||||
},
|
||||
400,
|
||||
);
|
||||
}
|
||||
apiHit(c, { endpoint: "/bookout", lastBody: data });
|
||||
//const authHeader = c.req.header("Authorization");
|
||||
//const token = authHeader?.split("Bearer ")[1] || "";
|
||||
|
||||
//const payload = await verify(token, process.env.JWT_SECRET!);
|
||||
try {
|
||||
//return apiReturn(c, true, access?.message, access?.data, 200);
|
||||
|
||||
//const pointData = { ...data, user: payload.user };
|
||||
|
||||
const bookout = await bookOutPallet(data);
|
||||
|
||||
console.log("from booout:", bookout);
|
||||
return c.json(
|
||||
{
|
||||
success: bookout?.success,
|
||||
message: bookout?.message,
|
||||
data: bookout.data,
|
||||
},
|
||||
200,
|
||||
);
|
||||
} catch (error) {
|
||||
console.log("from error:", error);
|
||||
//return apiReturn(c, false, "Error in setting the user access", error, 400);
|
||||
return c.json(
|
||||
{
|
||||
success: false,
|
||||
message: "Missing data please try again",
|
||||
error,
|
||||
},
|
||||
400,
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
export default app;
|
||||
80
lstV2/server/services/logistics/route/relocate.ts
Normal file
80
lstV2/server/services/logistics/route/relocate.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
|
||||
import { apiHit } from "../../../globalUtils/apiHits.js";
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
|
||||
import { relatePallet } from "../controller/commands/relocated.js";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const responseSchema = z.object({
|
||||
success: z.boolean().optional().openapi({ example: true }),
|
||||
message: z.string().optional().openapi({ example: "user access" }),
|
||||
});
|
||||
|
||||
app.openapi(
|
||||
createRoute({
|
||||
tags: ["logistics"],
|
||||
summary: "Consumes material based on its running number",
|
||||
method: "post",
|
||||
path: "/relocate",
|
||||
//middleware: authMiddleware,
|
||||
description:
|
||||
"Provided a running number and lot number you can consume material.",
|
||||
responses: {
|
||||
200: {
|
||||
content: { "application/json": { schema: responseSchema } },
|
||||
description: "stopped",
|
||||
},
|
||||
400: {
|
||||
content: { "application/json": { schema: responseSchema } },
|
||||
description: "Failed to stop",
|
||||
},
|
||||
401: {
|
||||
content: { "application/json": { schema: responseSchema } },
|
||||
description: "Failed to stop",
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const { data, error } = await tryCatch(c.req.json());
|
||||
|
||||
if (error) {
|
||||
return c.json(
|
||||
{
|
||||
success: false,
|
||||
message: "Missing data please try again",
|
||||
error,
|
||||
},
|
||||
400,
|
||||
);
|
||||
}
|
||||
apiHit(c, { endpoint: "/relocate", lastBody: data });
|
||||
//const authHeader = c.req.header("Authorization");
|
||||
//const token = authHeader?.split("Bearer ")[1] || "";
|
||||
|
||||
//const payload = await verify(token, process.env.JWT_SECRET!);
|
||||
try {
|
||||
//return apiReturn(c, true, access?.message, access?.data, 200);
|
||||
|
||||
const consume = await relatePallet(data);
|
||||
|
||||
console.log(consume);
|
||||
return c.json(
|
||||
{ success: consume?.success, message: consume?.message },
|
||||
200,
|
||||
);
|
||||
} catch (error) {
|
||||
//console.log(error);
|
||||
//return apiReturn(c, false, "Error in setting the user access", error, 400);
|
||||
return c.json(
|
||||
{
|
||||
success: false,
|
||||
message: "Missing data please try again",
|
||||
error,
|
||||
},
|
||||
400,
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
export default app;
|
||||
@@ -4,95 +4,92 @@ import { notifications } from "../../../../../database/schema/notifications.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { sendEmail } from "../sendMail.js";
|
||||
import { bow2incoming } from "../../../sqlServer/querys/notifications/bow2henkel.js";
|
||||
import { sendEmail } from "../sendMail.js";
|
||||
|
||||
const notification = async (notifyData: any) => {
|
||||
/**
|
||||
* Pass the entire notification over
|
||||
*/
|
||||
createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`);
|
||||
/**
|
||||
* Pass the entire notification over
|
||||
*/
|
||||
createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`);
|
||||
|
||||
// validate if there are any emails.
|
||||
if (notifyData.emails === "") {
|
||||
createLog(
|
||||
"error",
|
||||
"reprinting",
|
||||
"notify",
|
||||
`There are no emails set for ${notifyData.name}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
// validate if there are any emails.
|
||||
if (notifyData.emails === "") {
|
||||
createLog(
|
||||
"error",
|
||||
"reprinting",
|
||||
"notify",
|
||||
`There are no emails set for ${notifyData.name}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
//let labels: Labels[];
|
||||
//let labels: Labels[];
|
||||
|
||||
const { data: l, error: labelError } = await tryCatch(
|
||||
query(
|
||||
bow2incoming.replace(
|
||||
"[time]",
|
||||
notifyData.notifiySettings.processTime
|
||||
),
|
||||
"Label Reprints"
|
||||
)
|
||||
);
|
||||
const labels: any = l?.data as any;
|
||||
if (labelError) {
|
||||
createLog(
|
||||
"error",
|
||||
"reprinting",
|
||||
"notify",
|
||||
`Failed to get the labels: ${labelError}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
const { data: l, error: labelError } = await tryCatch(
|
||||
query(
|
||||
bow2incoming.replace("[time]", notifyData.notifiySettings.processTime),
|
||||
"Label Reprints",
|
||||
),
|
||||
);
|
||||
const labels: any = l?.data as any;
|
||||
if (labelError) {
|
||||
createLog(
|
||||
"error",
|
||||
"reprinting",
|
||||
"notify",
|
||||
`Failed to get the labels: ${labelError}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (labels.length > 0) {
|
||||
//send the email :D
|
||||
const emailSetup = {
|
||||
email: notifyData.emails,
|
||||
subject: "Alert! New incoming goods has been received",
|
||||
template: "bow2IncomingGoods",
|
||||
context: {
|
||||
items: labels,
|
||||
time: notifyData.notifiySettings.processTime,
|
||||
},
|
||||
};
|
||||
if (labels.length > 0) {
|
||||
//send the email :D
|
||||
const emailSetup = {
|
||||
email: notifyData.emails,
|
||||
subject: "Alert! New incoming goods has been received",
|
||||
template: "bow2IncomingGoods",
|
||||
context: {
|
||||
items: labels,
|
||||
time: notifyData.notifiySettings.processTime,
|
||||
},
|
||||
};
|
||||
|
||||
const sentEmail = await sendEmail(emailSetup);
|
||||
const sentEmail = await sendEmail(emailSetup);
|
||||
|
||||
if (!sentEmail.success) {
|
||||
createLog(
|
||||
"error",
|
||||
"reprinting",
|
||||
"notify",
|
||||
"Failed to send email, will try again on next interval"
|
||||
);
|
||||
return;
|
||||
}
|
||||
if (!sentEmail.success) {
|
||||
createLog(
|
||||
"error",
|
||||
"reprinting",
|
||||
"notify",
|
||||
"Failed to send email, will try again on next interval",
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// // update the last time we ran and the prod id
|
||||
// const notifUpdate = {
|
||||
// prodID: labels[0].IdEtikettenHistorie,
|
||||
// lastRan: nowDate(),
|
||||
// };
|
||||
// // update the last time we ran and the prod id
|
||||
// const notifUpdate = {
|
||||
// prodID: labels[0].IdEtikettenHistorie,
|
||||
// lastRan: nowDate(),
|
||||
// };
|
||||
|
||||
// update the last time ran
|
||||
// update the last time ran
|
||||
|
||||
const { data, error } = await tryCatch(
|
||||
db
|
||||
.update(notifications)
|
||||
.set({
|
||||
lastRan: sql`NOW()`,
|
||||
notifiySettings: {
|
||||
...notifyData.notifiySettings,
|
||||
prodID: labels[0].IdEtikettenHistorie,
|
||||
},
|
||||
})
|
||||
.where(eq(notifications.name, notifyData.name))
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
const { data, error } = await tryCatch(
|
||||
db
|
||||
.update(notifications)
|
||||
.set({
|
||||
lastRan: sql`NOW()`,
|
||||
notifiySettings: {
|
||||
...notifyData.notifiySettings,
|
||||
prodID: labels[0].IdEtikettenHistorie,
|
||||
},
|
||||
})
|
||||
.where(eq(notifications.name, notifyData.name)),
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
export default notification;
|
||||
|
||||
@@ -0,0 +1,108 @@
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { notifications } from "../../../../../database/schema/notifications.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import {
|
||||
type SqlQuery,
|
||||
sqlQuerySelector,
|
||||
} from "../../../sqlServer/utils/querySelector.utils.js";
|
||||
import { sendEmail } from "../sendMail.js";
|
||||
|
||||
export interface Labels {
|
||||
IdEtikettenHistorie?: number;
|
||||
}
|
||||
const notification = async (notifyData: any) => {
|
||||
/**
|
||||
* Pass the entire notification over
|
||||
*/
|
||||
createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`);
|
||||
|
||||
// validate if there are any emails.
|
||||
if (notifyData.emails === "") {
|
||||
createLog(
|
||||
"error",
|
||||
"reprinting",
|
||||
"notify",
|
||||
`There are no emails set for ${notifyData.name}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
const cycleCountCheck = sqlQuerySelector("cycleCountCheck.query") as SqlQuery;
|
||||
|
||||
if (!cycleCountCheck.success) {
|
||||
console.log("Failed to load the query: ", cycleCountCheck.message);
|
||||
return;
|
||||
}
|
||||
|
||||
const { data: c, error: cError } = await tryCatch(
|
||||
query(
|
||||
cycleCountCheck.query.replace("[timeTest]", notifyData.checkInterval),
|
||||
"Cycle count check",
|
||||
),
|
||||
);
|
||||
const cycle: any = c?.data ?? ([] as any);
|
||||
|
||||
//console.log(cycle);
|
||||
|
||||
if (cError) {
|
||||
createLog(
|
||||
"error",
|
||||
"reprinting",
|
||||
"notify",
|
||||
`Failed to get the labels: ${cError}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (cycle.length > 0) {
|
||||
//send the email :D
|
||||
const emailSetup = {
|
||||
email: notifyData.emails,
|
||||
subject: `Alert! RowBlocked for more than ${notifyData.checkInterval} min(s)`,
|
||||
template: "cycleCountCheck",
|
||||
context: {
|
||||
checkTime: notifyData.checkInterval,
|
||||
items: cycle,
|
||||
},
|
||||
};
|
||||
|
||||
const sentEmail = await sendEmail(emailSetup);
|
||||
|
||||
if (!sentEmail.success) {
|
||||
createLog(
|
||||
"error",
|
||||
"reprinting",
|
||||
"notify",
|
||||
"Failed to send email, will try again on next interval",
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// // update the last time we ran and the prod id
|
||||
// const notifUpdate = {
|
||||
// prodID: labels[0].IdEtikettenHistorie,
|
||||
// lastRan: nowDate(),
|
||||
// };
|
||||
|
||||
// update the last time ran
|
||||
|
||||
const { data, error } = await tryCatch(
|
||||
db
|
||||
.update(notifications)
|
||||
.set({
|
||||
lastRan: sql`NOW()`,
|
||||
// notifiySettings: {
|
||||
// ...notifyData.notifiySettings,
|
||||
// prodID: labels[0].IdEtikettenHistorie,
|
||||
// },
|
||||
})
|
||||
.where(eq(notifications.name, notifyData.name)),
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
export default notification;
|
||||
@@ -1,112 +1,112 @@
|
||||
import { isBefore } from "date-fns";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { fifoIndex } from "../../../../../database/schema/fifoIndex.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { currentInv } from "../../../sqlServer/querys/notifications/fifoIndex/currentInv.js";
|
||||
import { shippedPallets } from "../../../sqlServer/querys/notifications/fifoIndex/shippedPallets.js";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { fifoIndex } from "../../../../../database/schema/fifoIndex.js";
|
||||
|
||||
export default async function fifoIndexCheck() {
|
||||
/**
|
||||
* getting the shipped pallets
|
||||
*/
|
||||
const { data: shipped, error: eShipped } = await tryCatch(
|
||||
query(shippedPallets, "notify shipped pallets")
|
||||
);
|
||||
/**
|
||||
* getting the shipped pallets
|
||||
*/
|
||||
const { data: shipped, error: eShipped } = await tryCatch(
|
||||
query(shippedPallets, "notify shipped pallets"),
|
||||
);
|
||||
|
||||
const { data: currentStuff, error: eCurrentInv } = await tryCatch(
|
||||
query(currentInv, "notify shipped pallets")
|
||||
);
|
||||
const { data: currentStuff, error: eCurrentInv } = await tryCatch(
|
||||
query(currentInv, "notify shipped pallets"),
|
||||
);
|
||||
|
||||
// console.log(shipped?.data[2]);
|
||||
// console.log(currentStuff?.data[2]);
|
||||
// console.log(shipped?.data[2]);
|
||||
// console.log(currentStuff?.data[2]);
|
||||
|
||||
/**
|
||||
* We want to check if the each shippened pallet is out of fifo
|
||||
*/
|
||||
const check = shipped?.data.map((n: any) => {
|
||||
/**
|
||||
* Returns all data so we know if we are in or out.
|
||||
*/
|
||||
//check if there are pallets older than the current one we are mapped on.
|
||||
const fifoCheck = currentStuff?.data.filter(
|
||||
(i: any) => isBefore(i.prodDate, n.prodDate) && i.av === n.av
|
||||
);
|
||||
//console.log(fifoCheck.length);
|
||||
if (fifoCheck.length > 0) {
|
||||
// console.log("Out of fifo", {
|
||||
// av: n.av,
|
||||
// rn: n.runningNr,
|
||||
// fRn: fifoCheck[0].runningNr,
|
||||
// dates: [fifoCheck[0].prodDate, n.prodDate],
|
||||
// });
|
||||
}
|
||||
/**
|
||||
* We want to check if the each shippened pallet is out of fifo
|
||||
*/
|
||||
const check: any = shipped?.data.map((n: any) => {
|
||||
/**
|
||||
* Returns all data so we know if we are in or out.
|
||||
*/
|
||||
//check if there are pallets older than the current one we are mapped on.
|
||||
const fifoCheck = currentStuff?.data.filter(
|
||||
(i: any) => isBefore(i.prodDate, n.prodDate) && i.av === n.av,
|
||||
) as any;
|
||||
//console.log(fifoCheck.length);
|
||||
if (fifoCheck.length > 0) {
|
||||
// console.log("Out of fifo", {
|
||||
// av: n.av,
|
||||
// rn: n.runningNr,
|
||||
// fRn: fifoCheck[0].runningNr,
|
||||
// dates: [fifoCheck[0].prodDate, n.prodDate],
|
||||
// });
|
||||
}
|
||||
|
||||
return {
|
||||
...n,
|
||||
// currentInv: fifoCheck[0],
|
||||
fifoFollowed: fifoCheck.length === 0 ? true : false,
|
||||
};
|
||||
});
|
||||
return {
|
||||
...n,
|
||||
// currentInv: fifoCheck[0],
|
||||
fifoFollowed: fifoCheck.length === 0 ? true : false,
|
||||
};
|
||||
});
|
||||
|
||||
/**
|
||||
* lets see just the av that is our or in
|
||||
*/
|
||||
/**
|
||||
* lets see just the av that is our or in
|
||||
*/
|
||||
|
||||
const avCheck = (check: any) => {
|
||||
/**
|
||||
* This will only return the data based on out of fifo.
|
||||
*/
|
||||
// check how many times each av showed up
|
||||
const avCounts = check.reduce((a: any, c: any) => {
|
||||
if (c.fifoFollowed === false) {
|
||||
const avValue = c.av;
|
||||
a[avValue] = (a[avValue] || 0) + 1;
|
||||
}
|
||||
return a;
|
||||
}, {});
|
||||
const avCheck = (check: any) => {
|
||||
/**
|
||||
* This will only return the data based on out of fifo.
|
||||
*/
|
||||
// check how many times each av showed up
|
||||
const avCounts = check.reduce((a: any, c: any) => {
|
||||
if (c.fifoFollowed === false) {
|
||||
const avValue = c.av;
|
||||
a[avValue] = (a[avValue] || 0) + 1;
|
||||
}
|
||||
return a;
|
||||
}, {});
|
||||
|
||||
// transform them back to an avCount Object
|
||||
const result = Object.keys(avCounts).map((av) => ({
|
||||
av: parseInt(av, 10),
|
||||
count: avCounts[av],
|
||||
}));
|
||||
// transform them back to an avCount Object
|
||||
const result = Object.keys(avCounts).map((av) => ({
|
||||
av: parseInt(av, 10),
|
||||
count: avCounts[av],
|
||||
}));
|
||||
|
||||
return result;
|
||||
};
|
||||
return result;
|
||||
};
|
||||
|
||||
const outOfFifo: any = avCheck(check);
|
||||
const totalOut = outOfFifo.reduce((sum: any, c: any) => {
|
||||
return sum + c.count;
|
||||
}, 0);
|
||||
const outOfFifo: any = avCheck(check);
|
||||
const totalOut = outOfFifo.reduce((sum: any, c: any) => {
|
||||
return sum + c.count;
|
||||
}, 0);
|
||||
|
||||
/**
|
||||
* add the data to the db
|
||||
*/
|
||||
for (let i = 0; i < check.length; i++) {
|
||||
const { data: dbInsert, error: dbE } = await tryCatch(
|
||||
db
|
||||
.insert(fifoIndex)
|
||||
.values({
|
||||
lot: check[i].lot,
|
||||
av: check[i].av,
|
||||
runningNr: check[i].runningNr,
|
||||
prodDate: check[i].prodDate,
|
||||
fifoFollowed: check[i].fifoFollowed,
|
||||
add_Date: check[i].add_Date,
|
||||
})
|
||||
.onConflictDoNothing()
|
||||
);
|
||||
}
|
||||
/**
|
||||
* add the data to the db
|
||||
*/
|
||||
for (let i = 0; i < check!.length; i++) {
|
||||
const { data: dbInsert, error: dbE } = await tryCatch(
|
||||
db
|
||||
.insert(fifoIndex)
|
||||
.values({
|
||||
lot: check[i].lot,
|
||||
av: check[i].av,
|
||||
runningNr: check[i].runningNr,
|
||||
prodDate: check[i].prodDate,
|
||||
fifoFollowed: check[i].fifoFollowed,
|
||||
add_Date: check[i].add_Date,
|
||||
})
|
||||
.onConflictDoNothing(),
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Fifo index data",
|
||||
data: {
|
||||
palletsOut: check,
|
||||
totalShipped: shipped?.data.length,
|
||||
inFifo: shipped?.data.length - totalOut,
|
||||
outOfFifoData: outOfFifo,
|
||||
},
|
||||
};
|
||||
return {
|
||||
success: true,
|
||||
message: "Fifo index data",
|
||||
data: {
|
||||
palletsOut: check,
|
||||
totalShipped: shipped?.data.length,
|
||||
inFifo: shipped!.data.length - totalOut,
|
||||
outOfFifoData: outOfFifo,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -0,0 +1,183 @@
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { notifications } from "../../../../../database/schema/notifications.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js";
|
||||
import { sendEmail } from "../sendMail.js";
|
||||
|
||||
let running = false;
|
||||
export default async function platToPlantEdi(notifyData: any) {
|
||||
createLog("info", "plantToPlant", "notify", `monitoring ${notifyData.name}`);
|
||||
if (running) {
|
||||
createLog(
|
||||
"info",
|
||||
"plantToPlant",
|
||||
"notify",
|
||||
`Notifcation ${notifyData.name} is already running skipping`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
running = true;
|
||||
|
||||
const { data: noti, error: notiError } = (await tryCatch(
|
||||
db
|
||||
.select()
|
||||
.from(notifications)
|
||||
.where(eq(notifications.name, notifyData.name)),
|
||||
)) as any;
|
||||
|
||||
if (notiError) {
|
||||
createLog(
|
||||
"error",
|
||||
"edi",
|
||||
"notify",
|
||||
"Error in getting the notification data",
|
||||
);
|
||||
}
|
||||
|
||||
// get the default emails they can be blank if as we will only add these to the end of the email from the full flow
|
||||
let emails = noti[0]?.email ?? "";
|
||||
|
||||
const checkBol = sqlQuerySelector("checkBol.query");
|
||||
|
||||
if (!checkBol.success) {
|
||||
createLog("error", "edi", "notify", "Error in getting the bol query data");
|
||||
}
|
||||
|
||||
const pLinkedB = sqlQuerySelector("palletsLinkedToBol.query");
|
||||
|
||||
if (!pLinkedB.success) {
|
||||
createLog("error", "edi", "notify", "Error in getting the bol query data");
|
||||
}
|
||||
|
||||
let ignoreBols: string[] = noti[0]?.notifiySettings?.processedBol ?? [];
|
||||
|
||||
const joinBols = ignoreBols.join(",");
|
||||
|
||||
let updateQuery = noti[0]?.notifiySettings?.includeAll
|
||||
? checkBol?.query?.replace(
|
||||
"and a.bezeichnung like '%Alpla%'",
|
||||
"--and a.bezeichnung like '%Alpla%'",
|
||||
)
|
||||
: checkBol?.query;
|
||||
|
||||
const { data: b, error: bError } = (await tryCatch(
|
||||
query(
|
||||
updateQuery
|
||||
?.replace("[timeCheck]", noti[0]?.checkInterval ?? "30")
|
||||
.replace("[ignoreBols]", joinBols ?? 500) ?? "",
|
||||
"Check bol",
|
||||
),
|
||||
)) as any;
|
||||
|
||||
if (bError) {
|
||||
return {
|
||||
success: false,
|
||||
message: "Error getting newly created bols",
|
||||
data: bError,
|
||||
};
|
||||
}
|
||||
|
||||
const planedByBol = new Map<string, string[]>();
|
||||
|
||||
for (const row of b.data) {
|
||||
if (!planedByBol.has(row.bol)) {
|
||||
planedByBol.set(row.bol, []);
|
||||
}
|
||||
|
||||
planedByBol.get(row.bol)!.push(String(row.idladeplanung));
|
||||
}
|
||||
|
||||
if (b.data.length > 0) {
|
||||
// loop each bol in the system and get the bols only
|
||||
for (const [bolNumber, idList] of planedByBol.entries()) {
|
||||
//for (const bol of b.data) {
|
||||
// run the process to get the the pallet numbers
|
||||
|
||||
const joinedIdLadeplanung = idList.join(",");
|
||||
|
||||
//console.log("BOL:", bolNumber);
|
||||
//console.log("IDLadeplanung string:", joinedIdLadeplanung);
|
||||
//console.log("IgnoreBols: ", joinBols);
|
||||
|
||||
// now get the pallets that are witing the ladeplanning
|
||||
const { data: pallets, error: pError } = await tryCatch(
|
||||
query(
|
||||
pLinkedB?.query?.replace(
|
||||
"[palLinkedToBol]",
|
||||
joinedIdLadeplanung ?? "0",
|
||||
) ?? "",
|
||||
"Get Pallets linked in the bol",
|
||||
),
|
||||
);
|
||||
|
||||
//console.log(pallets);
|
||||
|
||||
// console.log("Address: ", b.data[0].addressId ?? "0");
|
||||
|
||||
if (b.data[0].addressId === "") return;
|
||||
|
||||
ignoreBols.push(bolNumber);
|
||||
if (ignoreBols.length > 15) {
|
||||
ignoreBols.splice(0, ignoreBols.length - 15);
|
||||
}
|
||||
|
||||
// get the email address.
|
||||
const checkBol = sqlQuerySelector("addressInfo.query");
|
||||
|
||||
const { data: address, error: aError } = (await tryCatch(
|
||||
query(
|
||||
checkBol?.query?.replace(
|
||||
"[customerAddress]",
|
||||
b.data[0].addressId ?? "0",
|
||||
) ?? "",
|
||||
"Get Pallets linked in the bol",
|
||||
),
|
||||
)) as any;
|
||||
|
||||
if (noti[0]?.emails === "") return; // no default emails
|
||||
// setup the email to be sent :D
|
||||
const emailSetup = {
|
||||
email: `${noti[0]?.emails};${address.data[0].email ?? ""}`,
|
||||
subject: `New EDI transfer Created for BOL: ${bolNumber}`,
|
||||
template: "plantToPlantEdi",
|
||||
context: {
|
||||
items: pallets?.data ?? [],
|
||||
bol: bolNumber,
|
||||
//secondarySetting: notifyData.notifiySettings,
|
||||
},
|
||||
};
|
||||
|
||||
// send the email
|
||||
await sendEmail(emailSetup);
|
||||
|
||||
// add the bols to be ignored
|
||||
await db
|
||||
.update(notifications)
|
||||
.set({
|
||||
lastRan: sql`NOW()`,
|
||||
notifiySettings: {
|
||||
...noti[0]?.notifiySettings,
|
||||
processedBol: ignoreBols,
|
||||
},
|
||||
})
|
||||
.where(eq(notifications.name, notifyData.name));
|
||||
}
|
||||
|
||||
running = false;
|
||||
return {
|
||||
success: true,
|
||||
message: "All bols have been processed",
|
||||
data: [ignoreBols],
|
||||
};
|
||||
}
|
||||
running = false;
|
||||
return {
|
||||
success: true,
|
||||
message: "No new bols have been created",
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,118 @@
|
||||
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
|
||||
|
||||
import { errorMonitor } from "node:events";
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { notifications } from "../../../../../database/schema/notifications.js";
|
||||
import { settings } from "../../../../../database/schema/settings.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { sendEmail } from "../sendMail.js";
|
||||
|
||||
export interface DownTime {
|
||||
downTimeId?: number;
|
||||
machineAlias?: string;
|
||||
}
|
||||
export default async function tooManyErrors(notifyData: any) {
|
||||
// we will over ride this with users that want to sub to this
|
||||
// a new table will be called subalerts and link to the do a kinda linkn where the user wants it then it dose subId: 1, userID: x, notificationId: y. then in here we look up the userid to get the email :D
|
||||
// this could then leave the emails in the notificaion blank and let users sub to it.
|
||||
//console.log(notifyData);
|
||||
if (notifyData.emails === "") {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`There are no emails set for ${notifyData.name}`,
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// console.log(data.secondarySetting[0].duration);
|
||||
|
||||
const plant = await db
|
||||
.select()
|
||||
.from(settings)
|
||||
.where(eq(settings.name, "plantToken"));
|
||||
console.log(plant[0].value);
|
||||
// console.log(
|
||||
// errorQuery
|
||||
// .replace("[time]", notifyData.checkInterval)
|
||||
// .replace("[errorCount]", notifyData.notifiySettings.errorCount),
|
||||
// errorLogQuery.replace("[time]", notifyData.checkInterval),
|
||||
// );
|
||||
|
||||
let errorLogData: any = [];
|
||||
try {
|
||||
const errorData = await db.execute(sql`
|
||||
SELECT 'error' AS level, COUNT(*) AS error_count
|
||||
FROM public.logs
|
||||
WHERE level = 'error'
|
||||
AND "add_Date" > now() - INTERVAL ${sql.raw(`'${notifyData.checkInterval} minutes'`)}
|
||||
GROUP BY level
|
||||
HAVING COUNT(*) >= ${notifyData.notifiySettings.errorCount}
|
||||
`);
|
||||
if (
|
||||
errorData.length > 0
|
||||
// && downTime[0]?.downTimeId > notifyData.notifiySettings.prodID
|
||||
) {
|
||||
const errorLogs = await db.execute(sql`
|
||||
select* from public.logs where level = 'error' and "add_Date" > now() - INTERVAL ${sql.raw(`'${notifyData.checkInterval} minutes'`)} order by "add_Date" desc;
|
||||
`);
|
||||
|
||||
errorLogData = errorLogs;
|
||||
//send the email :D
|
||||
const emailSetup = {
|
||||
email: notifyData.emails,
|
||||
subject: `Alert! ${plant[0].value} has encountered ${
|
||||
errorLogData.length
|
||||
} ${errorLogData.length > 1 ? "errors" : "error"} in the last ${notifyData.checkInterval} min`,
|
||||
template: "tooManyErrors",
|
||||
context: {
|
||||
data: errorLogData.slice(0, 100),
|
||||
count: notifyData.notifiySettings.errorCount,
|
||||
time: notifyData.checkInterval,
|
||||
},
|
||||
};
|
||||
|
||||
//console.log(emailSetup);
|
||||
|
||||
const sentEmail = await sendEmail(emailSetup);
|
||||
|
||||
if (!sentEmail.success) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
"Failed to send email, will try again on next interval",
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message: "Failed to send email, will try again on next interval",
|
||||
data: sentEmail,
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`Error from running the downtimeCheck query: ${err}`,
|
||||
);
|
||||
|
||||
return {
|
||||
success: false,
|
||||
message: "Error running error data",
|
||||
data: err,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Error log checking ran",
|
||||
data: errorLogData ?? [],
|
||||
};
|
||||
}
|
||||
@@ -45,57 +45,57 @@ export const sendEmail = async (data: any): Promise<any> => {
|
||||
};
|
||||
}
|
||||
// get the plantToken
|
||||
const server = settingData.filter((n) => n.name === "server");
|
||||
//const server = settingData.filter((n) => n.name === "server");
|
||||
|
||||
if (
|
||||
server[0].value === "localhostx" &&
|
||||
process.env.EMAIL_USER &&
|
||||
process.env.EMAIL_PASSWORD
|
||||
) {
|
||||
transporter = nodemailer.createTransport({
|
||||
service: "gmail",
|
||||
host: "smtp.gmail.com",
|
||||
port: 465,
|
||||
auth: {
|
||||
user: process.env.EMAIL_USER,
|
||||
pass: process.env.EMAIL_PASSWORD,
|
||||
},
|
||||
//debug: true,
|
||||
});
|
||||
// if (
|
||||
// server[0].value === "localhostx" &&
|
||||
// process.env.EMAIL_USER &&
|
||||
// process.env.EMAIL_PASSWORD
|
||||
// ) {
|
||||
// transporter = nodemailer.createTransport({
|
||||
// service: "gmail",
|
||||
// host: "smtp.gmail.com",
|
||||
// port: 465,
|
||||
// auth: {
|
||||
// user: process.env.EMAIL_USER,
|
||||
// pass: process.env.EMAIL_PASSWORD,
|
||||
// },
|
||||
// //debug: true,
|
||||
// });
|
||||
|
||||
// update the from email
|
||||
fromEmail = process.env.EMAIL_USER;
|
||||
} else {
|
||||
// convert to the correct plant token.
|
||||
const plantToken = settingData.filter((s) => s.name === "plantToken");
|
||||
// // update the from email
|
||||
// fromEmail = process.env.EMAIL_USER;
|
||||
//} else {
|
||||
// convert to the correct plant token.
|
||||
//const plantToken = settingData.filter((s) => s.name === "plantToken");
|
||||
|
||||
let host = `${plantToken[0].value}-smtp.alpla.net`;
|
||||
// let host = `${plantToken[0].value}-smtp.alpla.net`;
|
||||
|
||||
const testServers = ["test1", "test2", "test3"];
|
||||
// const testServers = ["test1", "test2", "test3"];
|
||||
|
||||
if (testServers.includes(plantToken[0].value)) {
|
||||
host = "USMCD1-smtp.alpla.net";
|
||||
}
|
||||
// if (testServers.includes(plantToken[0].value)) {
|
||||
// host = "USMCD1-smtp.alpla.net";
|
||||
// }
|
||||
|
||||
if (plantToken[0].value === "usiow2") {
|
||||
host = "USIOW1-smtp.alpla.net";
|
||||
}
|
||||
// if (plantToken[0].value === "usiow2") {
|
||||
// host = "USIOW1-smtp.alpla.net";
|
||||
// }
|
||||
|
||||
transporter = nodemailer.createTransport({
|
||||
host: host,
|
||||
port: 25,
|
||||
rejectUnauthorized: false,
|
||||
//secure: false,
|
||||
// auth: {
|
||||
// user: "alplaprod",
|
||||
// pass: "obelix",
|
||||
// },
|
||||
debug: true,
|
||||
} as SMTPTransport.Options);
|
||||
|
||||
// update the from email
|
||||
fromEmail = `noreply@alpla.com`;
|
||||
}
|
||||
transporter = nodemailer.createTransport({
|
||||
host: "smtp.azurecomm.net",
|
||||
port: 587,
|
||||
//rejectUnauthorized: false,
|
||||
tls: {
|
||||
minVersion: "TLSv1.2",
|
||||
},
|
||||
auth: {
|
||||
user: "donotreply@mail.alpla.com",
|
||||
pass: process.env.SMTP_PASSWORD,
|
||||
},
|
||||
debug: true,
|
||||
});
|
||||
fromEmail = `DoNotReply@mail.alpla.com`;
|
||||
//}
|
||||
|
||||
// creating the handlbar options
|
||||
const viewPath = path.resolve(
|
||||
|
||||
@@ -10,7 +10,9 @@ import tiTrigger from "./routes/manualTiggerTi.js";
|
||||
import materialCheck from "./routes/materialPerDay.js";
|
||||
import blocking from "./routes/qualityBlocking.js";
|
||||
import sendemail from "./routes/sendMail.js";
|
||||
import errorHandling from "./routes/tooManyErrors.js";
|
||||
import { note, notificationCreate } from "./utils/masterNotifications.js";
|
||||
import { sqlJobCleanUp } from "./utils/notificationSqlCleanup.js";
|
||||
import { startNotificationMonitor } from "./utils/processNotifications.js";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
@@ -23,6 +25,7 @@ const routes = [
|
||||
notify,
|
||||
fifoIndex,
|
||||
materialCheck,
|
||||
errorHandling,
|
||||
] as const;
|
||||
|
||||
const appRoutes = routes.forEach((route) => {
|
||||
@@ -55,6 +58,7 @@ if (notesError) {
|
||||
setTimeout(() => {
|
||||
notificationCreate();
|
||||
startNotificationMonitor();
|
||||
sqlJobCleanUp();
|
||||
}, 5 * 1000);
|
||||
|
||||
export default app;
|
||||
|
||||
50
lstV2/server/services/notifications/routes/tooManyErrors.ts
Normal file
50
lstV2/server/services/notifications/routes/tooManyErrors.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
// an external way to creating logs
|
||||
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { db } from "../../../../database/dbclient.js";
|
||||
import { notifications } from "../../../../database/schema/notifications.js";
|
||||
import { apiHit } from "../../../globalUtils/apiHits.js";
|
||||
import { responses } from "../../../globalUtils/routeDefs/responses.js";
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
|
||||
import hasCorrectRole from "../../auth/middleware/roleCheck.js";
|
||||
import tooManyErrors from "../controller/notifications/tooManyErrors.js";
|
||||
import { getAllJobs } from "../utils/processNotifications.js";
|
||||
|
||||
const app = new OpenAPIHono({ strict: false });
|
||||
|
||||
app.openapi(
|
||||
createRoute({
|
||||
tags: ["server"],
|
||||
summary: "Returns current active notifications.",
|
||||
method: "get",
|
||||
path: "/toomanyerrors",
|
||||
middleware: [authMiddleware, hasCorrectRole(["systemAdmin"], "admin")],
|
||||
responses: responses(),
|
||||
}),
|
||||
async (c) => {
|
||||
apiHit(c, { endpoint: "/toomanyerrors" });
|
||||
|
||||
const { data, error } = await tryCatch(
|
||||
db
|
||||
.select()
|
||||
.from(notifications)
|
||||
.where(eq(notifications.name, "tooManyErrors")),
|
||||
);
|
||||
|
||||
if (error) {
|
||||
return c.json({
|
||||
success: false,
|
||||
message: "Error Getting Notification Settings.",
|
||||
data: error,
|
||||
});
|
||||
}
|
||||
const errorData = await tooManyErrors(data[0]);
|
||||
return c.json({
|
||||
success: true,
|
||||
message: "Current Error log data",
|
||||
data: errorData?.data,
|
||||
});
|
||||
},
|
||||
);
|
||||
export default app;
|
||||
@@ -3,175 +3,208 @@ import { notifications } from "../../../../database/schema/notifications.js";
|
||||
import { createLog } from "../../logger/logger.js";
|
||||
|
||||
export const note: any = [
|
||||
{
|
||||
name: "reprintLabels",
|
||||
description:
|
||||
"Monitors the labels that are printed and returns a value if one falls withing the time frame defined below.",
|
||||
checkInterval: 1,
|
||||
timeType: "min",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: { prodID: 1 },
|
||||
},
|
||||
{
|
||||
name: "downTimeCheck",
|
||||
description:
|
||||
"Checks for specific downtimes that are greater than 105 min.",
|
||||
checkInterval: 30,
|
||||
timeType: "min",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: { prodID: 1, daysInPast: 5, duration: 105 },
|
||||
},
|
||||
{
|
||||
name: "qualityBlocking",
|
||||
description:
|
||||
"Checks for new blocking orders that have been entered, recommened to get the most recent order in here before activating.",
|
||||
checkInterval: 30,
|
||||
timeType: "min",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
sentBlockingOrders: [{ timeStamp: "0", blockingOrder: 1 }],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "productionCheck",
|
||||
description: "Checks ppoo",
|
||||
checkInterval: 2,
|
||||
timeType: "hour",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
count: 0,
|
||||
weekend: false,
|
||||
locations: "0",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "stagingCheck",
|
||||
description:
|
||||
"Checks staging based on locations, locations need to be seperated by a ,",
|
||||
checkInterval: 2,
|
||||
timeType: "hour",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
count: 0,
|
||||
weekend: false,
|
||||
locations: "0",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "tiIntergration",
|
||||
description: "Checks for new releases to be put into ti",
|
||||
checkInterval: 60,
|
||||
timeType: "min",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
start: 36,
|
||||
end: 36,
|
||||
releases: [{ timeStamp: "0", releaseNumber: 1 }],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "exampleNotification",
|
||||
description: "Checks for new releases to be put into ti",
|
||||
checkInterval: 2,
|
||||
timeType: "min",
|
||||
emails: "",
|
||||
active: true,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
start: 36,
|
||||
end: 36,
|
||||
releases: [1, 2, 3],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "fifoIndex",
|
||||
description: "Checks for pallets that were shipped out of fifo",
|
||||
checkInterval: 1,
|
||||
timeType: "hour",
|
||||
emails: "blake.matthes@alpla.com",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
start: 36,
|
||||
end: 36,
|
||||
releases: [1, 2, 3],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bow2henkelincoming",
|
||||
description:
|
||||
"Checks for new incoming goods orders to be completed and sends an email for what truck and carrier it was",
|
||||
checkInterval: 15,
|
||||
timeType: "min",
|
||||
emails: "blake.matthes@alpla.com",
|
||||
active: false,
|
||||
notifiySettings: { processTime: 15 },
|
||||
},
|
||||
{
|
||||
name: "palletsRemovedAsWaste",
|
||||
description:
|
||||
"Validates stock to make sure, there are no pallets released that have been removed as waste already ",
|
||||
checkInterval: 15,
|
||||
timeType: "min",
|
||||
emails: "blake.matthes@alpla.com",
|
||||
active: false,
|
||||
notifiySettings: { prodID: 1 },
|
||||
},
|
||||
{
|
||||
name: "shortageBookings",
|
||||
description:
|
||||
"Checks for material shortage bookings by single av type or all types ",
|
||||
checkInterval: 15,
|
||||
timeType: "min",
|
||||
emails: "blake.matthes@alpla.com",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
time: 15,
|
||||
type: "all", // change this to something else or leave blank to use the av type
|
||||
avType: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "reprintLabels",
|
||||
description:
|
||||
"Monitors the labels that are printed and returns a value if one falls withing the time frame defined below.",
|
||||
checkInterval: 1,
|
||||
timeType: "min",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: { prodID: 1 },
|
||||
},
|
||||
{
|
||||
name: "downTimeCheck",
|
||||
description: "Checks for specific downtimes that are greater than 105 min.",
|
||||
checkInterval: 30,
|
||||
timeType: "min",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: { prodID: 1, daysInPast: 5, duration: 105 },
|
||||
},
|
||||
{
|
||||
name: "qualityBlocking",
|
||||
description:
|
||||
"Checks for new blocking orders that have been entered, recommened to get the most recent order in here before activating.",
|
||||
checkInterval: 30,
|
||||
timeType: "min",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
sentBlockingOrders: [{ timeStamp: "0", blockingOrder: 1 }],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "productionCheck",
|
||||
description: "Checks ppoo",
|
||||
checkInterval: 2,
|
||||
timeType: "hour",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
count: 0,
|
||||
weekend: false,
|
||||
locations: "0",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "stagingCheck",
|
||||
description:
|
||||
"Checks staging based on locations, locations need to be seperated by a ,",
|
||||
checkInterval: 2,
|
||||
timeType: "hour",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
count: 0,
|
||||
weekend: false,
|
||||
locations: "0",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "tiIntergration",
|
||||
description: "Checks for new releases to be put into ti",
|
||||
checkInterval: 60,
|
||||
timeType: "min",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
start: 36,
|
||||
end: 36,
|
||||
releases: [{ timeStamp: "0", releaseNumber: 1 }],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "exampleNotification",
|
||||
description: "Checks for new releases to be put into ti",
|
||||
checkInterval: 2,
|
||||
timeType: "min",
|
||||
emails: "",
|
||||
active: true,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
start: 36,
|
||||
end: 36,
|
||||
releases: [1, 2, 3],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "fifoIndex",
|
||||
description: "Checks for pallets that were shipped out of fifo",
|
||||
checkInterval: 1,
|
||||
timeType: "hour",
|
||||
emails: "blake.matthes@alpla.com",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
prodID: 1,
|
||||
start: 36,
|
||||
end: 36,
|
||||
releases: [1, 2, 3],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bow2henkelincoming",
|
||||
description:
|
||||
"Checks for new incoming goods orders to be completed and sends an email for what truck and carrier it was",
|
||||
checkInterval: 15,
|
||||
timeType: "min",
|
||||
emails: "blake.matthes@alpla.com",
|
||||
active: false,
|
||||
notifiySettings: { processTime: 15 },
|
||||
},
|
||||
{
|
||||
name: "palletsRemovedAsWaste",
|
||||
description:
|
||||
"Validates stock to make sure, there are no pallets released that have been removed as waste already ",
|
||||
checkInterval: 15,
|
||||
timeType: "min",
|
||||
emails: "blake.matthes@alpla.com",
|
||||
active: false,
|
||||
notifiySettings: { prodID: 1 },
|
||||
},
|
||||
{
|
||||
name: "shortageBookings",
|
||||
description:
|
||||
"Checks for material shortage bookings by single av type or all types ",
|
||||
checkInterval: 15,
|
||||
timeType: "min",
|
||||
emails: "blake.matthes@alpla.com",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
time: 15,
|
||||
type: "all", // change this to something else or leave blank to use the av type
|
||||
avType: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "tooManyErrors",
|
||||
description:
|
||||
"Checks to see how many errors in the last x time and sends an email based on this.",
|
||||
checkInterval: 15,
|
||||
timeType: "min",
|
||||
emails: "blake.matthes@alpla.com",
|
||||
active: true,
|
||||
notifiySettings: {
|
||||
errorCount: 10, // change this to something else or leave blank to use the av type
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "cycleCountCheck",
|
||||
description:
|
||||
"Checks if a cycle count has been active for longer than the defined time.",
|
||||
checkInterval: 60,
|
||||
timeType: "min",
|
||||
emails: "",
|
||||
active: false,
|
||||
notifiySettings: {
|
||||
errorCount: 10, // change this to something else or leave blank to use the av type
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "platToPlantEdi",
|
||||
description:
|
||||
"This is the plant to plant edi that will send an edi to the email once it ships, the emails will be for the receiving plants",
|
||||
checkInterval: 15,
|
||||
timeType: "min",
|
||||
emails: "blake.matthes@alpla.com;Maritza.Hernandez@alpla.com",
|
||||
active: false,
|
||||
notifiySettings: { processedBol: [500], includeAll: false },
|
||||
},
|
||||
];
|
||||
|
||||
export const notificationCreate = async () => {
|
||||
for (let i = 0; i < note.length; i++) {
|
||||
try {
|
||||
const notify = await db
|
||||
.insert(notifications)
|
||||
.values(note[i])
|
||||
.onConflictDoUpdate({
|
||||
target: notifications.name,
|
||||
set: {
|
||||
name: note[i].name,
|
||||
description: note[i].description,
|
||||
//notifiySettings: note[i].notifiySettings,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`There was an error getting the notifications: ${JSON.stringify(
|
||||
error
|
||||
)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"nofity",
|
||||
"notifications were just added/updated due to server startup"
|
||||
);
|
||||
for (let i = 0; i < note.length; i++) {
|
||||
try {
|
||||
const notify = await db
|
||||
.insert(notifications)
|
||||
.values(note[i])
|
||||
.onConflictDoUpdate({
|
||||
target: notifications.name,
|
||||
set: {
|
||||
name: note[i].name,
|
||||
description: note[i].description,
|
||||
//notifiySettings: note[i].notifiySettings,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`There was an error getting the notifications: ${JSON.stringify(
|
||||
error,
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"nofity",
|
||||
"notifications were just added/updated due to server startup",
|
||||
);
|
||||
};
|
||||
|
||||
@@ -0,0 +1,86 @@
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
import { query } from "../../sqlServer/prodSqlServer.js";
|
||||
import {
|
||||
type SqlQuery,
|
||||
sqlQuerySelector,
|
||||
} from "../../sqlServer/utils/querySelector.utils.js";
|
||||
|
||||
const cleanUpQuery = `
|
||||
DECLARE @JobName varchar(max) = '[jobName]'
|
||||
UPDATE msdb.dbo.sysjobs
|
||||
SET enabled = 0
|
||||
WHERE name = @JobName;
|
||||
`;
|
||||
|
||||
// disable the jobs
|
||||
const jobNames: string[] = [
|
||||
"monitor_$_lots",
|
||||
"monitor_$_lots_2",
|
||||
"monitor$lots",
|
||||
"Monitor_APO", //listen for people to cry this is no longer a thing
|
||||
"Monitor_APO2",
|
||||
"Monitor_AutoConsumeMaterials", // TODO: migrate to lst
|
||||
"Monitor_AutoConsumeMaterials_iow1",
|
||||
"Monitor_AutoConsumeMaterials_iow2",
|
||||
"Monitor_BlockedINV_Loc",
|
||||
"monitor_inv_cycle",
|
||||
"monitor_inv_cycle_1",
|
||||
"monitor_inv_cycle_2",
|
||||
"monitor_edi_import", // TODO: migrate to lst -- for the query select count(*) from AlplaPROD_test3.dbo.T_EDIDokumente (nolock) where /* IdLieferant > 1 and */ add_date > DATEADD(MINUTE, -30, getdate())
|
||||
"Monitor_Lot_Progression",
|
||||
"Monitor_Lots", // TODO: migrate to lst -- this should be the one where we monitor the when a lot is assigned if its missing some data.
|
||||
"Monitor_MinMax", // TODO:Migrate to lst
|
||||
"Monitor_MinMax_iow2",
|
||||
"Monitor_PM",
|
||||
"Monitor_Purity",
|
||||
"monitor_wastebookings", // TODO: Migrate
|
||||
"LastPriceUpdate", // not even sure what this is
|
||||
"GETLabelsCount", // seems like an old jc job
|
||||
"jobforpuritycount", // was not even working correctly
|
||||
"Monitor_EmptyAutoConsumLocations", // not sure who uses this one
|
||||
"monitor_labelreprint", // Migrated but need to find out who really wants this
|
||||
"test", // not even sure why this is active
|
||||
"UpdateLastMoldUsed", // old jc inserts data into a table but not sure what its used for not linked to any other alert
|
||||
"UpdateWhsePositions3", // old jc inserts data into a table but not sure what its used for not linked to any other alert
|
||||
"UpdateWhsePositions4",
|
||||
"delete_print", // i think this was in here for when we was having lag prints in iowa1
|
||||
"INV_WHSE_1", // something random i wrote long time ago looks like an inv thing to see aged stuff
|
||||
"INV_WHSE_2",
|
||||
"laneAgeCheck", // another strange one thats been since moved to lst
|
||||
"monitor_blocking_2",
|
||||
"monitor_blocking", // already in lst
|
||||
"monitor_min_inv", // do we still want this one? it has a description of: this checks m-f the min inventory of materials based on the min level set in stock
|
||||
"Monitor_MixedLocations",
|
||||
"Monitor_PM",
|
||||
"Monitor_PM2",
|
||||
"wrong_lots_1",
|
||||
"wrong_lots_2",
|
||||
"invenotry check", // spelling error one of my stupids
|
||||
"monitor_hold_monitor",
|
||||
"Monitor_Silo_adjustments",
|
||||
"monitor_qualityLocMonitor", // validating with lima this is still needed
|
||||
];
|
||||
|
||||
export const sqlJobCleanUp = async () => {
|
||||
// running a query to disable jobs that are moved to lst to be better maintained
|
||||
const sqlQuery = sqlQuerySelector("disableJob.query") as SqlQuery;
|
||||
|
||||
if (!sqlQuery.success) {
|
||||
console.log("Failed to load the query: ", sqlQuery.message);
|
||||
return;
|
||||
}
|
||||
for (const job of jobNames) {
|
||||
const { data, error } = await tryCatch(
|
||||
query(
|
||||
sqlQuery.query.replace("[jobName]", `${job}`),
|
||||
`Disabling job: ${job}`,
|
||||
),
|
||||
);
|
||||
|
||||
if (error) {
|
||||
console.log(error);
|
||||
}
|
||||
|
||||
//console.log(data);
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,44 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
|
||||
{{> styles}}
|
||||
</head>
|
||||
<body>
|
||||
<p>All,</p>
|
||||
<p>The below are cycle counts that have been in progress for longer than {{checkTime}} min(s). </p>
|
||||
<table >
|
||||
<thead>
|
||||
<tr>
|
||||
<th>WarehouseID</th>
|
||||
<th>Warehouse</th>
|
||||
<th>LocationID</th>
|
||||
<th>Location</th>
|
||||
<th>Cycle count Started</th>
|
||||
<th>Started by</th>
|
||||
{{!-- <th>Downtime finish</th> --}}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{{#each items}}
|
||||
<tr>
|
||||
<td>{{idWarehouse}}</td>
|
||||
<td>{{warehouse}}</td>
|
||||
<td>{{locationId}}</td>
|
||||
<td>{{location}}</td>
|
||||
<td>{{cycleCountStartAt}}</td>
|
||||
<td>{{blockedBy}}</td>
|
||||
{{!-- <td>{{dtEnd}}</td> --}}
|
||||
</tr>
|
||||
{{/each}}
|
||||
</tbody>
|
||||
</table>
|
||||
<div>
|
||||
<p>Thank you,</p>
|
||||
<p>LST Team</p>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -0,0 +1,46 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
|
||||
{{> styles}}
|
||||
</head>
|
||||
<body>
|
||||
<p>All,</p>
|
||||
<p>BOL: {{bol}} was created with the below pallets.</p>
|
||||
<p>Please head to stock and import the pallets via the normal incoming goods process (now/immediately).</p>
|
||||
<p>When encountering a discrepancy in pallets/cages received, please correct this after the pallets have been imported.</p>
|
||||
<p>Due to these being plant to plant shipments, the only way to correct this is to bring them in then undo the incoming goods process.</p>
|
||||
<br></br>
|
||||
<table >
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Running Number</th>
|
||||
<th>AV</th>
|
||||
<th>Description</th>
|
||||
<th>Lot number</th>
|
||||
<th>Quantity</th>
|
||||
{{!-- <th>Downtime finish</th> --}}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{{#each items}}
|
||||
<tr>
|
||||
<td>{{runningNr}}</td>
|
||||
<td>{{article}}</td>
|
||||
<td>{{alias}}</td>
|
||||
<td>{{lotNumber}}</td>
|
||||
<td>{{qty}}</td>
|
||||
{{!-- <td>{{dtEnd}}</td> --}}
|
||||
</tr>
|
||||
{{/each}}
|
||||
</tbody>
|
||||
</table>
|
||||
<div>
|
||||
<p>Thank you,</p>
|
||||
<p>LST Team</p>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -0,0 +1,42 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
|
||||
{{> styles}}
|
||||
</head>
|
||||
<body>
|
||||
<p>All,</p>
|
||||
<p>The plant has encountered more than {{count}} errors in the last {{time}} mins, please see below errors and address as needed. </p>
|
||||
<table >
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Username</th>
|
||||
<th>Service</th>
|
||||
<th>Message</th>
|
||||
<th>Checked</th>
|
||||
<th>LogTime</th>
|
||||
{{!-- <th>Downtime finish</th> --}}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{{#each data}}
|
||||
<tr>
|
||||
<td>{{username}}</td>
|
||||
<td>{{service}}</td>
|
||||
<td>{{message}}</td>
|
||||
<td>{{checked}}</td>
|
||||
<td>{{add_Date}}</td>
|
||||
{{!-- <td>{{dtEnd}}</td> --}}
|
||||
</tr>
|
||||
{{/each}}
|
||||
</tbody>
|
||||
</table>
|
||||
<div>
|
||||
<p>Thank you,</p>
|
||||
<p>LST Team</p>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -7,7 +7,10 @@ import { mmQuery } from "../../../sqlServer/querys/ocp/mainMaterial.js";
|
||||
|
||||
export const isMainMatStaged = async (lot: any) => {
|
||||
const set = serverSettings.length === 0 ? [] : serverSettings;
|
||||
// make staged false by deefault and error logged if theres an issue
|
||||
const checkColorSetting = set.filter((n) => n.name === "checkColor");
|
||||
const checkPKGSetting = set.filter((n) => n.name === "checkPKG");
|
||||
|
||||
// make staged false by default and error logged if theres an issue
|
||||
let isStaged = { message: "Material is staged", success: true };
|
||||
|
||||
const { data, error } = (await tryCatch(
|
||||
@@ -43,7 +46,7 @@ export const isMainMatStaged = async (lot: any) => {
|
||||
};
|
||||
}
|
||||
|
||||
// strangly the lot is not always sending over in slc so adding this in for now to see what line is cauing this issue
|
||||
// strangely the lot is not always sending over in slc so adding this in for now to see what line is cauing this issue
|
||||
if (!lot) {
|
||||
createLog("info", "mainMaterial", "ocp", "No lot was passed correctly.");
|
||||
return isStaged;
|
||||
@@ -125,7 +128,11 @@ export const isMainMatStaged = async (lot: any) => {
|
||||
createLog("info", "mainMaterial", "ocp", `Maint material query ran.`);
|
||||
|
||||
const mainMaterial = res.find((n: any) => n.IsMainMaterial);
|
||||
if (mainMaterial?.Staged === 1) {
|
||||
|
||||
if (
|
||||
mainMaterial?.Staged === 1 &&
|
||||
(checkColorSetting[0].value !== "1" || checkPKGSetting[0].value !== "1")
|
||||
) {
|
||||
createLog(
|
||||
"info",
|
||||
"mainMaterial",
|
||||
@@ -152,7 +159,6 @@ export const isMainMatStaged = async (lot: any) => {
|
||||
|
||||
// we need to filter the color stuff and then look for includes instead of a standard name. this way we can capture a everything and not a single type
|
||||
// for manual consume color if active to check colors
|
||||
const checkColorSetting = set.filter((n) => n.name === "checkColor");
|
||||
|
||||
// 2. Auto color
|
||||
if (checkColorSetting[0].value === "1") {
|
||||
@@ -162,7 +168,7 @@ export const isMainMatStaged = async (lot: any) => {
|
||||
results: res,
|
||||
lot,
|
||||
filterFn: (n) =>
|
||||
n.isManual &&
|
||||
!n.isManual &&
|
||||
!("noPKGAutoShortage" in n) &&
|
||||
!("noPKGManualShortage" in n), // pool = non-main, auto
|
||||
failCondition: (n) => n.autoConsumeCheck === "autoConsumeNOK", // column = autoConsumeCheck
|
||||
@@ -202,7 +208,7 @@ export const isMainMatStaged = async (lot: any) => {
|
||||
}
|
||||
|
||||
// // if we want to check the packaging
|
||||
const checkPKGSetting = set.filter((n) => n.name === "checkPKG");
|
||||
|
||||
if (checkPKGSetting[0].value === "1") {
|
||||
const pkgAuto = checkCondition({
|
||||
results: res,
|
||||
|
||||
148
lstV2/server/services/ocp/controller/materials/materialChecks.ts
Normal file
148
lstV2/server/services/ocp/controller/materials/materialChecks.ts
Normal file
@@ -0,0 +1,148 @@
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { serverSettings } from "../../../server/controller/settings/getSettings.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { machineCheck } from "../../../sqlServer/querys/ocp/machineId.js";
|
||||
import { mmQuery } from "../../../sqlServer/querys/ocp/mainMaterial.js";
|
||||
|
||||
export const isMainMatStaged = async (lot: any) => {
|
||||
const set = serverSettings.length === 0 ? [] : serverSettings;
|
||||
// make staged false by default and error logged if theres an issue
|
||||
let isStaged = { message: "Material is staged", success: true };
|
||||
|
||||
// validate the machine actaully needs materials to print
|
||||
const { data, error } = (await tryCatch(
|
||||
query(
|
||||
machineCheck.replace("where Active = 1 and [Location] = [loc]", ""),
|
||||
"check machine needs mm",
|
||||
),
|
||||
)) as any;
|
||||
|
||||
const machine = data.data.filter(
|
||||
(m: any) => m.HumanReadableId === lot.machineID,
|
||||
);
|
||||
|
||||
// just in case we encounter an issue with the machines
|
||||
if (machine.length === 0) {
|
||||
createLog(
|
||||
"error",
|
||||
"mainMaterial",
|
||||
"ocp-system",
|
||||
"Invalid machine passed over.",
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message: "Invalid machine passed over.",
|
||||
};
|
||||
}
|
||||
|
||||
// we have a check on ksc side to ignore the tetra machine for now as its not updating in 2.0
|
||||
if (!machine[0].StagingMainMaterialMandatory) {
|
||||
createLog(
|
||||
"info",
|
||||
"mainMaterial",
|
||||
"ocp",
|
||||
`The machine dose not require mm to print and book in.`,
|
||||
);
|
||||
return {
|
||||
message: "Machine dose not require material to be staged",
|
||||
success: true,
|
||||
};
|
||||
}
|
||||
|
||||
// strangely the lot is not always sending over in slc so adding this in for now to see what line is cauing this issue
|
||||
if (!lot) {
|
||||
createLog("info", "mainMaterial", "ocp", "No lot was passed correctly.");
|
||||
return isStaged;
|
||||
}
|
||||
|
||||
if (typeof lot !== "object" || lot === null || Array.isArray(lot)) {
|
||||
createLog(
|
||||
"info",
|
||||
"mainMaterial",
|
||||
"ocp",
|
||||
`The lot sent over is not an object: ${JSON.stringify(lot)}`,
|
||||
);
|
||||
return isStaged;
|
||||
}
|
||||
|
||||
// get the materials needed for the passed over lot
|
||||
const { data: material, error: errorMat } = (await tryCatch(
|
||||
query(mmQuery.replaceAll("[lotNumber]", lot.lot), "Main Material Check"),
|
||||
)) as any;
|
||||
|
||||
if (errorMat) {
|
||||
return { message: "Failed to get lot info", success: false };
|
||||
}
|
||||
|
||||
const mat = material.data;
|
||||
|
||||
const mainMaterial = mat.find((n: any) => n.IsMainMaterial);
|
||||
const checkColorSetting = set.filter((n) => n.name === "checkColor");
|
||||
const checkPKGSetting = set.filter((n) => n.name === "checkPKG");
|
||||
|
||||
// if we only care about having the check for mm staged and dont care about the rules we just let it fly by.
|
||||
// risk here is getting $Shortage if there really is nothing
|
||||
if (
|
||||
mainMaterial?.Staged === 1 &&
|
||||
(checkColorSetting[0].value !== "1" || checkPKGSetting[0].value !== "1")
|
||||
) {
|
||||
createLog(
|
||||
"info",
|
||||
"mainMaterial",
|
||||
"ocp",
|
||||
`Main material: ${mainMaterial.MaterialHumanReadableId} - ${mainMaterial.MaterialDescription}: is staged for ${lot.lot}`,
|
||||
);
|
||||
return {
|
||||
message: `Main material: ${mainMaterial.MaterialHumanReadableId} - ${mainMaterial.MaterialDescription}: is staged for ${lot.lot}`,
|
||||
success: true,
|
||||
};
|
||||
}
|
||||
|
||||
// do we have enough main material for the next pallet
|
||||
if (mainMaterial?.noMMShortage === "noMM") {
|
||||
createLog(
|
||||
"info",
|
||||
"mainMaterial",
|
||||
"ocp",
|
||||
`Main material: ${mainMaterial.MaterialHumanReadableId} - ${mainMaterial.MaterialDescription}: is not staged for ${lot.lot}`,
|
||||
);
|
||||
return {
|
||||
message: `Main material: ${mainMaterial.MaterialHumanReadableId} - ${mainMaterial.MaterialDescription}: is not staged for ${lot.lot}`,
|
||||
success: false,
|
||||
};
|
||||
}
|
||||
|
||||
// do we have color to the line
|
||||
if (checkColorSetting[0].value === "1") {
|
||||
const autoConsumeColor = mat.find(
|
||||
(n: any) =>
|
||||
!n.isManual &&
|
||||
!("noPKGAutoShortage" in n) &&
|
||||
!("noPKGManualShortage" in n),
|
||||
);
|
||||
|
||||
if (autoConsumeColor.autoConsumeCheck === "autoConsumeNOK") {
|
||||
createLog(
|
||||
"info",
|
||||
"mainMaterial",
|
||||
"ocp",
|
||||
`lot: ${lot.lot}, is missing: ${autoConsumeColor
|
||||
.map(
|
||||
(o: any) =>
|
||||
`${o.MaterialHumanReadableId} - ${o.MaterialDescription}`,
|
||||
)
|
||||
.join(",\n ")} for autoconsume`,
|
||||
);
|
||||
return {
|
||||
message: `lot: ${lot.lot}, is missing: ${autoConsumeColor
|
||||
.map(
|
||||
(o: any) =>
|
||||
`${o.MaterialHumanReadableId} - ${o.MaterialDescription}`,
|
||||
)
|
||||
.join(",\n ")} for autoconsume`,
|
||||
success: false,
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -20,84 +20,90 @@ const palletSend = new Tag("Zone_6.Ready_to_Send");
|
||||
const strapperError = new Tag("Zone_3.Strapper_Faulted");
|
||||
|
||||
export const dycoConnect = async () => {
|
||||
// if we crash or start over reset the timers so we dont get duplicates
|
||||
clearInterval(plcCycle);
|
||||
if (isDycoRunning)
|
||||
return { success: false, message: "Dyco is already connected." };
|
||||
// if we crash or start over reset the timers so we dont get duplicates
|
||||
clearInterval(plcCycle);
|
||||
if (isDycoRunning)
|
||||
return { success: false, message: "Dyco is already connected." };
|
||||
|
||||
// Remove all listeners before adding a new one to prevent memory leaks
|
||||
PLC.removeAllListeners("error");
|
||||
// Remove all listeners before adding a new one to prevent memory leaks
|
||||
PLC.removeAllListeners("error");
|
||||
|
||||
try {
|
||||
await PLC.connect(plcAddress, 0).then(async () => {
|
||||
createLog("info", "dyco", "ocp", `We are connected to the dyco.`);
|
||||
isDycoRunning = true;
|
||||
try {
|
||||
await PLC.connect(plcAddress, 0).then(async () => {
|
||||
createLog("info", "dyco", "ocp", `We are connected to the dyco.`);
|
||||
isDycoRunning = true;
|
||||
|
||||
plcCycle = setInterval(async () => {
|
||||
if (isReading) {
|
||||
createLog(
|
||||
"debug",
|
||||
"dyco",
|
||||
"ocp",
|
||||
"Skipping cycle: previous read still in progress."
|
||||
);
|
||||
return;
|
||||
}
|
||||
isReading = true; // Set flag
|
||||
try {
|
||||
await PLC.readTag(labelerTag);
|
||||
await PLC.readTag(palletSend);
|
||||
await PLC.readTag(strapperError);
|
||||
plcCycle = setInterval(async () => {
|
||||
if (isReading) {
|
||||
createLog(
|
||||
"debug",
|
||||
"dyco",
|
||||
"ocp",
|
||||
"Skipping cycle: previous read still in progress.",
|
||||
);
|
||||
return;
|
||||
}
|
||||
isReading = true; // Set flag
|
||||
try {
|
||||
await PLC.readTag(labelerTag);
|
||||
await PLC.readTag(palletSend);
|
||||
await PLC.readTag(strapperError);
|
||||
|
||||
// strapper check
|
||||
strapperFaults(strapperError);
|
||||
// strapper check
|
||||
strapperFaults(strapperError);
|
||||
|
||||
// send the labeler tag data over
|
||||
labelerTagRead(labelerTag);
|
||||
// send the labeler tag data over
|
||||
labelerTagRead(labelerTag);
|
||||
|
||||
// send the end of line check over.
|
||||
palletSendTag(palletSend);
|
||||
} catch (error: any) {
|
||||
createLog(
|
||||
"error",
|
||||
"dyco",
|
||||
"ocp",
|
||||
`Error reading PLC tag: ${error.message}`
|
||||
);
|
||||
} finally {
|
||||
isReading = false; // Reset flag
|
||||
}
|
||||
}, plcInterval);
|
||||
});
|
||||
} catch (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"dyco",
|
||||
"ocp",
|
||||
`There was an error in the dyco: ${error}`
|
||||
);
|
||||
await PLC.disconnect();
|
||||
isDycoRunning = false;
|
||||
}
|
||||
// send the end of line check over.
|
||||
palletSendTag(palletSend);
|
||||
} catch (error: any) {
|
||||
createLog(
|
||||
"error",
|
||||
"dyco",
|
||||
"ocp",
|
||||
`Error reading PLC tag: ${error.message}`,
|
||||
);
|
||||
// if we error out we want to disconnect and reconnect
|
||||
closeDyco();
|
||||
setTimeout(() => {
|
||||
createLog("info", "dyco", "ocp", `Reconnecting to the dyco`);
|
||||
dycoConnect();
|
||||
}, 2 * 1000);
|
||||
} finally {
|
||||
isReading = false; // Reset flag
|
||||
}
|
||||
}, plcInterval);
|
||||
});
|
||||
} catch (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"dyco",
|
||||
"ocp",
|
||||
`There was an error in the dyco: ${error}`,
|
||||
);
|
||||
await PLC.disconnect();
|
||||
isDycoRunning = false;
|
||||
}
|
||||
};
|
||||
|
||||
export const closeDyco = async () => {
|
||||
if (!isDycoRunning)
|
||||
return { success: false, message: "Dyco is not connected." };
|
||||
if (!isDycoRunning)
|
||||
return { success: false, message: "Dyco is not connected." };
|
||||
|
||||
console.log(`Closing the connection`);
|
||||
try {
|
||||
await PLC.disconnect();
|
||||
isDycoRunning = false;
|
||||
return {
|
||||
success: true,
|
||||
message: "Dyco Connection is now closed.",
|
||||
};
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return {
|
||||
success: false,
|
||||
message: "There was an error closing the dyco connection.",
|
||||
};
|
||||
}
|
||||
console.log(`Closing the connection`);
|
||||
try {
|
||||
await PLC.disconnect();
|
||||
isDycoRunning = false;
|
||||
return {
|
||||
success: true,
|
||||
message: "Dyco Connection is now closed.",
|
||||
};
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return {
|
||||
success: false,
|
||||
message: "There was an error closing the dyco connection.",
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,29 +1,29 @@
|
||||
import { createPlcMonitor } from "./plcController.js";
|
||||
|
||||
export const zechettiConnect = () => {
|
||||
const config: any = {
|
||||
controllers: [
|
||||
{
|
||||
id: "Zecchetti_1",
|
||||
ip: "192.168.193.97",
|
||||
slot: 0,
|
||||
rpi: 250,
|
||||
printerId: 22, // grabbed from 2.0
|
||||
tags: ["N7[0]"],
|
||||
},
|
||||
{
|
||||
id: "Zecchetti_2",
|
||||
ip: "192.168.193.111",
|
||||
slot: 0,
|
||||
rpi: 100,
|
||||
printerId: 23,
|
||||
tags: ["N8[0]"],
|
||||
},
|
||||
],
|
||||
};
|
||||
const config: any = {
|
||||
controllers: [
|
||||
{
|
||||
id: "Zecchetti_1",
|
||||
ip: "192.168.193.97",
|
||||
slot: 0,
|
||||
rpi: 250,
|
||||
printerId: 22, // grabbed from 2.0
|
||||
tags: ["N7[0]"],
|
||||
},
|
||||
// {
|
||||
// id: "Zecchetti_2",
|
||||
// ip: "192.168.193.111",
|
||||
// slot: 0,
|
||||
// rpi: 100,
|
||||
// printerId: 23,
|
||||
// tags: ["N8[0]"],
|
||||
// },
|
||||
],
|
||||
};
|
||||
|
||||
const monitor = createPlcMonitor(config);
|
||||
const monitor = createPlcMonitor(config);
|
||||
|
||||
// Start
|
||||
monitor.start();
|
||||
// Start
|
||||
monitor.start();
|
||||
};
|
||||
|
||||
@@ -41,7 +41,7 @@ export const qualityCycle = async () => {
|
||||
message: "There was an error getting quality request data",
|
||||
};
|
||||
}
|
||||
const lstQData: any = data;
|
||||
const lstQData: any = data ?? [];
|
||||
// get the pallets that currentStat is moved
|
||||
// const res = await runQuery(palletMoveCheck, "palletCheck");
|
||||
|
||||
|
||||
@@ -1,238 +1,253 @@
|
||||
import { eq } from "drizzle-orm";
|
||||
import sql from "mssql";
|
||||
import { prodSqlConfig } from "./utils/prodServerConfig.js";
|
||||
import { createLog } from "../logger/logger.js";
|
||||
import { db } from "../../../database/dbclient.js";
|
||||
import { settings } from "../../../database/schema/settings.js";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { installed } from "../../index.js";
|
||||
import { checkHostnamePort } from "../../globalUtils/pingServer.js";
|
||||
import { installed } from "../../index.js";
|
||||
import { createLog } from "../logger/logger.js";
|
||||
import { serverSettings } from "../server/controller/settings/getSettings.js";
|
||||
import { prodSqlConfig } from "./utils/prodServerConfig.js";
|
||||
|
||||
let pool: any;
|
||||
let pool: sql.ConnectionPool;
|
||||
let connected: boolean = false;
|
||||
export const initializeProdPool = async () => {
|
||||
if (!installed) {
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
"The server was not installed will reconnect in 5 seconds"
|
||||
);
|
||||
setTimeout(() => {
|
||||
initializeProdPool();
|
||||
}, 5 * 1000);
|
||||
if (!installed) {
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
"The server was not installed will reconnect in 5 seconds",
|
||||
);
|
||||
setTimeout(() => {
|
||||
initializeProdPool();
|
||||
}, 5 * 1000);
|
||||
|
||||
return { success: false, message: "The server is not installed." };
|
||||
}
|
||||
// const dbServer = await db
|
||||
// .select()
|
||||
// .from(settings)
|
||||
// .where(eq(settings.name, "dbServer"));
|
||||
return { success: false, message: "The server is not installed." };
|
||||
}
|
||||
// const dbServer = await db
|
||||
// .select()
|
||||
// .from(settings)
|
||||
// .where(eq(settings.name, "dbServer"));
|
||||
|
||||
// the move to the go version for settings
|
||||
const dbServer = serverSettings.filter(
|
||||
(n: any) => n.name === "dbServer"
|
||||
) as any;
|
||||
// the move to the go version for settings
|
||||
const dbServer = serverSettings.filter(
|
||||
(n: any) => n.name === "dbServer",
|
||||
) as any;
|
||||
|
||||
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`);
|
||||
const serverUp = await checkHostnamePort(
|
||||
`${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`,
|
||||
);
|
||||
|
||||
if (!serverUp) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"server",
|
||||
`The sql ${dbServer[0].value} is not reachable`
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message: `The sql ${dbServer[0].value} is not reachable`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
if (!serverUp) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"server",
|
||||
`The sql ${dbServer[0].value} is not reachable`,
|
||||
);
|
||||
// closePool()
|
||||
// setTimeout(() => {
|
||||
// initializeProdPool();
|
||||
// }, 2*1000);
|
||||
return {
|
||||
success: false,
|
||||
message: `The sql ${dbServer[0].value} is not reachable`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
// make sure the server is not set to localhost this will prevent some weird issues later but can be localhost on the dev
|
||||
// const serverLoc = await db
|
||||
// .select()
|
||||
// .from(settings)
|
||||
// .where(eq(settings.name, "dbServer"));
|
||||
// make sure the server is not set to localhost this will prevent some weird issues later but can be localhost on the dev
|
||||
// const serverLoc = await db
|
||||
// .select()
|
||||
// .from(settings)
|
||||
// .where(eq(settings.name, "dbServer"));
|
||||
|
||||
const serverLoc = serverSettings.filter(
|
||||
(n: any) => n.name === "dbServer"
|
||||
) as any;
|
||||
if (
|
||||
serverLoc[0].value === "localhost" &&
|
||||
process.env.NODE_ENV !== "development"
|
||||
) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
"The server is set to localhost, and you are not in development mode."
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
"The server is set to localhost, and you are not in development mode.",
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
const serverLoc = serverSettings.filter(
|
||||
(n: any) => n.name === "dbServer",
|
||||
) as any;
|
||||
if (
|
||||
serverLoc[0].value === "localhost" &&
|
||||
process.env.NODE_ENV !== "development"
|
||||
) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
"The server is set to localhost, and you are not in development mode.",
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
"The server is set to localhost, and you are not in development mode.",
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
// if you were restarting from the endpoint you get this lovely error
|
||||
if (connected) {
|
||||
createLog("error", "lst", "sqlProd", "There is already a connection.");
|
||||
return { success: false, message: "There is already a connection." };
|
||||
}
|
||||
try {
|
||||
const config = await prodSqlConfig();
|
||||
pool = await sql.connect(config!);
|
||||
// if you were restarting from the endpoint you get this lovely error
|
||||
if (connected) {
|
||||
createLog("error", "lst", "sqlProd", "There is already a connection.");
|
||||
return { success: false, message: "There is already a connection." };
|
||||
}
|
||||
try {
|
||||
const config = await prodSqlConfig();
|
||||
pool = new sql.ConnectionPool(config!);
|
||||
await pool.connect();
|
||||
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
`Connected to ${config?.server}, and looking at ${config?.database}`
|
||||
);
|
||||
connected = true;
|
||||
return {
|
||||
success: true,
|
||||
message: "The sql server connection has been closed",
|
||||
};
|
||||
} catch (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
`${JSON.stringify(
|
||||
error
|
||||
)}, "There was an error connecting to the pool."`
|
||||
);
|
||||
throw new Error("There was an error closing the sql connection");
|
||||
}
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
`Connected to ${config?.server}, and looking at ${config?.database}`,
|
||||
);
|
||||
connected = true;
|
||||
return {
|
||||
success: true,
|
||||
message: "The sql server connection has been closed",
|
||||
};
|
||||
} catch (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
`${JSON.stringify(error)}, "There was an error connecting to the pool."`,
|
||||
);
|
||||
// closePool()
|
||||
// setTimeout(() => {
|
||||
// initializeProdPool();
|
||||
// }, 2*1000);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "The sql server connection has been closed",
|
||||
};
|
||||
//throw new Error("There was an error closing the sql connection");
|
||||
}
|
||||
};
|
||||
|
||||
export const closePool = async () => {
|
||||
if (!connected) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
"There is no connection a connection."
|
||||
);
|
||||
return { success: false, message: "There is already a connection." };
|
||||
}
|
||||
try {
|
||||
await pool.close();
|
||||
createLog("info", "lst", "sqlProd", "Connection pool closed");
|
||||
connected = false;
|
||||
return {
|
||||
success: true,
|
||||
message: "The sql server connection has been closed",
|
||||
};
|
||||
} catch (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
`${JSON.stringify(
|
||||
error
|
||||
)}, "There was an error closing the sql connection"`
|
||||
);
|
||||
throw new Error("There was an error closing the sql connection");
|
||||
}
|
||||
if (!connected) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
"There is no connection a connection.",
|
||||
);
|
||||
return { success: false, message: "There is already a connection." };
|
||||
}
|
||||
try {
|
||||
await pool.close();
|
||||
createLog("info", "lst", "sqlProd", "Connection pool closed");
|
||||
connected = false;
|
||||
return {
|
||||
success: true,
|
||||
message: "The sql server connection has been closed",
|
||||
};
|
||||
} catch (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
`${JSON.stringify(
|
||||
error,
|
||||
)}, "There was an error closing the sql connection"`,
|
||||
);
|
||||
throw new Error("There was an error closing the sql connection");
|
||||
}
|
||||
};
|
||||
|
||||
export async function query(queryToRun: string, name: string) {
|
||||
/**
|
||||
* Just an extra catch incase someone tried to run a query while we were not connected to the server or sql server
|
||||
*/
|
||||
// const dbServer = await db
|
||||
// .select()
|
||||
// .from(settings)
|
||||
// .where(eq(settings.name, "dbServer"));
|
||||
/**
|
||||
* Just an extra catch incase someone tried to run a query while we were not connected to the server or sql server
|
||||
*/
|
||||
// const dbServer = await db
|
||||
// .select()
|
||||
// .from(settings)
|
||||
// .where(eq(settings.name, "dbServer"));
|
||||
|
||||
const dbServer = serverSettings.filter(
|
||||
(n: any) => n.name === "dbServer"
|
||||
) as any;
|
||||
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`);
|
||||
const dbServer = serverSettings.filter(
|
||||
(n: any) => n.name === "dbServer",
|
||||
) as any;
|
||||
// const serverUp = await checkHostnamePort(
|
||||
// `${process.env.NODE_ENV !== "development" ? "localhost" : dbServer[0].value}:1433`,
|
||||
// );
|
||||
|
||||
if (!serverUp) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"server",
|
||||
`The sql ${dbServer[0].value} is not reachable`
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message: `The sql ${dbServer[0].value} is not reachable`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
// if (!serverUp) {
|
||||
// createLog(
|
||||
// "error",
|
||||
// "lst",
|
||||
// "server",
|
||||
// `Failed to run query due to ${dbServer[0].value} not being reachable.`,
|
||||
// );
|
||||
// return {
|
||||
// success: false,
|
||||
// message: `Failed to run query due to ${dbServer[0].value} not being reachable.`,
|
||||
// data: [],
|
||||
// };
|
||||
// }
|
||||
|
||||
if (!connected) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"server",
|
||||
`The sql ${dbServer[0].value} is not connected`
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message: `The sql ${dbServer[0].value} is not not connected`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
/**
|
||||
* We no longer need to send over the plant token change as we do it inside the query function.
|
||||
*/
|
||||
// const plantToken = await db
|
||||
// .select()
|
||||
// .from(settings)
|
||||
// .where(eq(settings.name, "plantToken"));
|
||||
const plantToken = serverSettings.filter(
|
||||
(n: any) => n.name === "plantToken"
|
||||
) as any;
|
||||
const query = queryToRun.replaceAll("test1", plantToken[0].value);
|
||||
if (!connected) {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"server",
|
||||
`The sql ${dbServer[0].value} is not connected`,
|
||||
);
|
||||
|
||||
try {
|
||||
const result = await pool.request().query(query);
|
||||
return {
|
||||
success: false,
|
||||
message: `The sql ${dbServer[0].value} is not not connected`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
/**
|
||||
* We no longer need to send over the plant token change as we do it inside the query function.
|
||||
*/
|
||||
// const plantToken = await db
|
||||
// .select()
|
||||
// .from(settings)
|
||||
// .where(eq(settings.name, "plantToken"));
|
||||
const plantToken = serverSettings.filter(
|
||||
(n: any) => n.name === "plantToken",
|
||||
) as any;
|
||||
const query = queryToRun.replaceAll("test1", plantToken[0].value);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Query results for: ${name}`,
|
||||
data: result.recordset,
|
||||
};
|
||||
} catch (error: any) {
|
||||
if (error.code === "ETIMEOUT") {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
`${JSON.stringify(
|
||||
error
|
||||
)}, ${name} did not run due to a timeout.`
|
||||
);
|
||||
//throw new Error(`${name} query did not run due to a timeout.`);
|
||||
return {
|
||||
success: false,
|
||||
message: `${name} query did not run due to a timeout.`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
try {
|
||||
const result = await pool.request().query(query);
|
||||
|
||||
if (error.code === "EREQUEST") {
|
||||
// throw new Error(
|
||||
// `${name} encoutnered an error ${error.originalError.info.message}`
|
||||
// );
|
||||
return {
|
||||
success: false,
|
||||
message: `${name} encoutnered an error ${error.originalError.info.message}`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
message: `Query results for: ${name}`,
|
||||
data: result.recordset,
|
||||
};
|
||||
} catch (error: any) {
|
||||
if (error.code === "ETIMEOUT") {
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
`${JSON.stringify(error)}, ${name} did not run due to a timeout.`,
|
||||
);
|
||||
//throw new Error(`${name} query did not run due to a timeout.`);
|
||||
return {
|
||||
success: false,
|
||||
message: `${name} query did not run due to a timeout.`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
//console.log(error.originalError.info.message);
|
||||
//EREQUEST
|
||||
//throw new Error(`${name} encoutnered an error ${error.code}`);
|
||||
}
|
||||
if (error.code === "EREQUEST") {
|
||||
// throw new Error(
|
||||
// `${name} encoutnered an error ${error.originalError.info.message}`
|
||||
// );
|
||||
return {
|
||||
success: false,
|
||||
message: `${name} encoutnered an error ${error.originalError.info.message}`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
//console.log(error.originalError.info.message);
|
||||
//EREQUEST
|
||||
//throw new Error(`${name} encoutnered an error ${error.code}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -61,7 +61,8 @@ V_Artikel.ArtikelvariantenTypBez= 'LDPE' or
|
||||
V_Artikel.ArtikelvariantenTypBez= 'PP' or
|
||||
V_Artikel.ArtikelvariantenTypBez= 'HDPE' or
|
||||
V_Artikel.ArtikelvariantenTypBez= 'PET' or
|
||||
V_Artikel.ArtikelvariantenTypBez= 'PET-P'
|
||||
V_Artikel.ArtikelvariantenTypBez= 'PET-P' or
|
||||
V_Artikel.ArtikelvariantenTypBez= 'PET-G'
|
||||
THEN 'MM'
|
||||
WHEN
|
||||
V_Artikel.ArtikelvariantenTypBez='HDPE-Waste' or
|
||||
|
||||
@@ -1,83 +1,8 @@
|
||||
export const deliveryByDateRange = `
|
||||
use AlplaPROD_test1
|
||||
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
|
||||
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
|
||||
select * from
|
||||
(select (select wert from dbo.T_SystemParameter where Bezeichnung = 'Werkskuerzel') as Plant,
|
||||
AuftragsNummer as OrderNumber,
|
||||
PositionsNummer as CustomerLineNumber,
|
||||
AbrufNummer as CustomerReleaseNumber,
|
||||
CONVERT(date, AbrufLiefertermin) as DeliveryDate,
|
||||
CONVERT(DATE,JournalDatum) Bol_PrintDate,
|
||||
AbrufMenge AS OrderQuantity,
|
||||
AbrufMengeVPK as OrderPallets,
|
||||
GelieferteMenge AS DeliveredQTY,
|
||||
GelieferteMengeVPK as DeliverdPallets,
|
||||
JournalNummer as BOLNum,
|
||||
ProdArtikelBez AS ProductFamily,
|
||||
dbo.V_LadePlanungenLadeAuftragAbruf.AbrufIdKundenAdresse AS IdCustomer,
|
||||
dbo.V_LadePlanungenLadeAuftragAbruf.AdressBez AS CustName,
|
||||
dbo.T_EAIJournal.IdJournalStatus as bolStatus,
|
||||
V_TrackerAuftragsAbrufe.IdAuftragsAbruf as releaseNum,
|
||||
V_LadePlanungenLadeAuftragAbruf.IdLadeAuftrag as truckPostion
|
||||
,dbo.V_TrackerAuftragsAbrufe.IdArtikelvarianten as av
|
||||
,dbo.V_TrackerAuftragsAbrufe.ArtikelVariantenAlias as alias
|
||||
,'Base Plant' as plantType
|
||||
from dbo.V_TrackerAuftragsAbrufe (nolock)
|
||||
|
||||
left join
|
||||
dbo.V_LadePlanungenLadeAuftragAbruf on V_TrackerAuftragsAbrufe.IdAuftragsAbruf =
|
||||
dbo.V_LadePlanungenLadeAuftragAbruf.AbrufIdAuftragsAbruf
|
||||
|
||||
left join
|
||||
dbo.T_EAIJournal on dbo.V_LadePlanungenLadeAuftragAbruf.IdLadeAuftrag =
|
||||
dbo.T_EAIJournal.IdLadeAuftrag
|
||||
|
||||
left join
|
||||
dbo.V_ArtikelKomplett on V_TrackerAuftragsAbrufe.IdArtikelVarianten =
|
||||
dbo.V_ArtikelKomplett.IdArtikelvarianten
|
||||
where GelieferteMengeVPK > 0 AND (
|
||||
AbrufLiefertermin IS NULL
|
||||
OR CONVERT(date, JournalDatum) BETWEEN @StartDate AND @EndDate
|
||||
)
|
||||
|
||||
/*in house*/
|
||||
union all
|
||||
|
||||
select top (50) (select wert from dbo.T_SystemParameter where Bezeichnung = 'Werkskuerzel') as Plant
|
||||
,[KundenAuftragsNummer] as OrderNumber
|
||||
,[KundenPositionsNummer] as CustomerLineNumber
|
||||
, null as CustomerReleaseNumber
|
||||
,CONVERT(date, i.Add_Date) as DeliveryDate
|
||||
,CONVERT(DATE,i.Upd_Date) Bol_PrintDate
|
||||
,null AS OrderQuantity
|
||||
,null as OrderPallets
|
||||
,LieferMengeVereinbart AS DeliveredQTY
|
||||
,null as DeliverdPallets
|
||||
,JournalNummer as BOLNum
|
||||
,null AS ProductFamily
|
||||
,IdAdresse AS IdCustomer
|
||||
,null AS CustName
|
||||
,null as bolStatus
|
||||
,null as releaseNum
|
||||
,null as truckPostion
|
||||
,i.IdArtikelVariante as av
|
||||
,null as alias
|
||||
,'In-House' as plantType
|
||||
--,*
|
||||
|
||||
from [dbo].[T_InhouseLieferungen] as i (nolock)
|
||||
|
||||
where CONVERT(date, Upd_Date) BETWEEN @StartDate AND @EndDate
|
||||
) x
|
||||
|
||||
|
||||
order by Bol_PrintDate desc
|
||||
`;
|
||||
|
||||
export const deliveryByDateRangeAndAv = `
|
||||
use [test1_AlplaPROD2.0_Read]
|
||||
|
||||
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
|
||||
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
|
||||
SELECT
|
||||
r.[ArticleHumanReadableId]
|
||||
,[ReleaseNumber]
|
||||
@@ -86,12 +11,15 @@ r.[ArticleHumanReadableId]
|
||||
,[CustomerReleaseNumber]
|
||||
,[ReleaseState]
|
||||
,[DeliveryState]
|
||||
,ea.JournalNummer
|
||||
,ea.JournalNummer as BOL_Number
|
||||
,[ReleaseConfirmationState]
|
||||
,[PlanningState]
|
||||
,format(r.[OrderDate], 'yyyy-MM-dd HH:mm') as OrderDate
|
||||
,FORMAT(r.[DeliveryDate], 'yyyy-MM-dd HH:mm') as DeliveryDate
|
||||
,FORMAT(r.[LoadingDate], 'yyyy-MM-dd HH:mm') as LoadingDate
|
||||
--,format(r.[OrderDate], 'yyyy-MM-dd HH:mm') as OrderDate
|
||||
,r.[OrderDate]
|
||||
--,FORMAT(r.[DeliveryDate], 'yyyy-MM-dd HH:mm') as DeliveryDate
|
||||
,r.[DeliveryDate]
|
||||
--,FORMAT(r.[LoadingDate], 'yyyy-MM-dd HH:mm') as LoadingDate
|
||||
,r.[LoadingDate]
|
||||
,[Quantity]
|
||||
,[DeliveredQuantity]
|
||||
,r.[AdditionalInformation1]
|
||||
@@ -108,6 +36,161 @@ r.[ArticleHumanReadableId]
|
||||
,[Irradiated]
|
||||
,r.[CreatedByEdi]
|
||||
,[DeliveryAddressHumanReadableId]
|
||||
,DeliveryAddressDescription
|
||||
,[CustomerArtNo]
|
||||
,[TotalPrice]
|
||||
,r.[ArticleAlias]
|
||||
|
||||
FROM [order].[Release] (nolock) as r
|
||||
|
||||
left join
|
||||
[order].LineItem as x on
|
||||
|
||||
r.LineItemId = x.id
|
||||
|
||||
left join
|
||||
[order].Header as h on
|
||||
x.HeaderId = h.id
|
||||
|
||||
--bol stuff
|
||||
left join
|
||||
AlplaPROD_test1.dbo.V_LadePlanungenLadeAuftragAbruf (nolock) as zz
|
||||
on zz.AbrufIdAuftragsAbruf = r.ReleaseNumber
|
||||
|
||||
left join
|
||||
(select * from (SELECT
|
||||
ROW_NUMBER() OVER (PARTITION BY IdJournal ORDER BY add_date DESC) AS RowNum
|
||||
,*
|
||||
FROM [AlplaPROD_test1].[dbo].[T_Lieferungen] (nolock)) x
|
||||
|
||||
where RowNum = 1) as ea on
|
||||
zz.IdLieferschein = ea.IdJournal
|
||||
|
||||
where
|
||||
--r.ArticleHumanReadableId in ([articles])
|
||||
--r.ReleaseNumber = 1452
|
||||
|
||||
r.DeliveryDate between @StartDate AND @EndDate
|
||||
and DeliveredQuantity > 0
|
||||
--and Journalnummer = 169386
|
||||
|
||||
`;
|
||||
|
||||
// export const deliveryByDateRange = `
|
||||
// use AlplaPROD_test1
|
||||
// DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
|
||||
// DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
|
||||
// select * from
|
||||
// (select (select wert from dbo.T_SystemParameter where Bezeichnung = 'Werkskuerzel') as Plant,
|
||||
// AuftragsNummer as OrderNumber,
|
||||
// PositionsNummer as CustomerLineNumber,
|
||||
// AbrufNummer as CustomerReleaseNumber,
|
||||
// CONVERT(date, AbrufLiefertermin) as DeliveryDate,
|
||||
// CONVERT(DATE,JournalDatum) Bol_PrintDate,
|
||||
// AbrufMenge AS OrderQuantity,
|
||||
// AbrufMengeVPK as OrderPallets,
|
||||
// GelieferteMenge AS DeliveredQTY,
|
||||
// GelieferteMengeVPK as DeliverdPallets,
|
||||
// JournalNummer as BOLNum,
|
||||
// ProdArtikelBez AS ProductFamily,
|
||||
// dbo.V_LadePlanungenLadeAuftragAbruf.AbrufIdKundenAdresse AS IdCustomer,
|
||||
// dbo.V_LadePlanungenLadeAuftragAbruf.AdressBez AS CustName,
|
||||
// dbo.T_EAIJournal.IdJournalStatus as bolStatus,
|
||||
// V_TrackerAuftragsAbrufe.IdAuftragsAbruf as releaseNum,
|
||||
// V_LadePlanungenLadeAuftragAbruf.IdLadeAuftrag as truckPostion
|
||||
// ,dbo.V_TrackerAuftragsAbrufe.IdArtikelvarianten as av
|
||||
// ,dbo.V_TrackerAuftragsAbrufe.ArtikelVariantenAlias as alias
|
||||
// ,'Base Plant' as plantType
|
||||
// from dbo.V_TrackerAuftragsAbrufe (nolock)
|
||||
|
||||
// left join
|
||||
// dbo.V_LadePlanungenLadeAuftragAbruf on V_TrackerAuftragsAbrufe.IdAuftragsAbruf =
|
||||
// dbo.V_LadePlanungenLadeAuftragAbruf.AbrufIdAuftragsAbruf
|
||||
|
||||
// left join
|
||||
// dbo.T_EAIJournal on dbo.V_LadePlanungenLadeAuftragAbruf.IdLadeAuftrag =
|
||||
// dbo.T_EAIJournal.IdLadeAuftrag
|
||||
|
||||
// left join
|
||||
// dbo.V_ArtikelKomplett on V_TrackerAuftragsAbrufe.IdArtikelVarianten =
|
||||
// dbo.V_ArtikelKomplett.IdArtikelvarianten
|
||||
// where GelieferteMengeVPK > 0 AND (
|
||||
// AbrufLiefertermin IS NULL
|
||||
// OR CONVERT(date, JournalDatum) BETWEEN @StartDate AND @EndDate
|
||||
// )
|
||||
|
||||
// /*in house*/
|
||||
// union all
|
||||
|
||||
// select top (50) (select wert from dbo.T_SystemParameter where Bezeichnung = 'Werkskuerzel') as Plant
|
||||
// ,[KundenAuftragsNummer] as OrderNumber
|
||||
// ,[KundenPositionsNummer] as CustomerLineNumber
|
||||
// , null as CustomerReleaseNumber
|
||||
// ,CONVERT(date, i.Add_Date) as DeliveryDate
|
||||
// ,CONVERT(DATE,i.Upd_Date) Bol_PrintDate
|
||||
// ,null AS OrderQuantity
|
||||
// ,null as OrderPallets
|
||||
// ,LieferMengeVereinbart AS DeliveredQTY
|
||||
// ,null as DeliverdPallets
|
||||
// ,JournalNummer as BOLNum
|
||||
// ,null AS ProductFamily
|
||||
// ,IdAdresse AS IdCustomer
|
||||
// ,null AS CustName
|
||||
// ,null as bolStatus
|
||||
// ,null as releaseNum
|
||||
// ,null as truckPostion
|
||||
// ,i.IdArtikelVariante as av
|
||||
// ,null as alias
|
||||
// ,'In-House' as plantType
|
||||
// --,*
|
||||
|
||||
// from [dbo].[T_InhouseLieferungen] as i (nolock)
|
||||
|
||||
// where CONVERT(date, Upd_Date) BETWEEN @StartDate AND @EndDate
|
||||
// ) x
|
||||
|
||||
// order by Bol_PrintDate desc
|
||||
// `;
|
||||
|
||||
export const deliveryByDateRangeAndAv = `
|
||||
use [test1_AlplaPROD2.0_Read]
|
||||
|
||||
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
|
||||
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
|
||||
SELECT
|
||||
r.[ArticleHumanReadableId]
|
||||
,[ReleaseNumber]
|
||||
,h.CustomerOrderNumber
|
||||
,x.CustomerLineItemNumber
|
||||
,[CustomerReleaseNumber]
|
||||
,[ReleaseState]
|
||||
,[DeliveryState]
|
||||
,ea.JournalNummer as BOL_Number
|
||||
,[ReleaseConfirmationState]
|
||||
,[PlanningState]
|
||||
--,format(r.[OrderDate], 'yyyy-MM-dd HH:mm') as OrderDate
|
||||
,r.[OrderDate]
|
||||
--,FORMAT(r.[DeliveryDate], 'yyyy-MM-dd HH:mm') as DeliveryDate
|
||||
,r.[DeliveryDate]
|
||||
--,FORMAT(r.[LoadingDate], 'yyyy-MM-dd HH:mm') as LoadingDate
|
||||
,r.[LoadingDate]
|
||||
,[Quantity]
|
||||
,[DeliveredQuantity]
|
||||
,r.[AdditionalInformation1]
|
||||
,r.[AdditionalInformation2]
|
||||
,[TradeUnits]
|
||||
,[LoadingUnits]
|
||||
,[Trucks]
|
||||
,[LoadingToleranceType]
|
||||
,[SalesPrice]
|
||||
,[Currency]
|
||||
,[QuantityUnit]
|
||||
,[SalesPriceRemark]
|
||||
,r.[Remark]
|
||||
,[Irradiated]
|
||||
,r.[CreatedByEdi]
|
||||
,[DeliveryAddressHumanReadableId]
|
||||
,DeliveryAddressDescription
|
||||
,[CustomerArtNo]
|
||||
,[TotalPrice]
|
||||
,r.[ArticleAlias]
|
||||
@@ -141,8 +224,8 @@ where
|
||||
r.ArticleHumanReadableId in ([articles])
|
||||
--r.ReleaseNumber = 1452
|
||||
|
||||
and r.DeliveryDate between '[startDate]' and '[endDate]'
|
||||
|
||||
order by DeliveryDate desc
|
||||
and r.DeliveryDate between @StartDate AND @EndDate
|
||||
--and DeliveredQuantity > 0
|
||||
--and Journalnummer = 169386
|
||||
|
||||
`;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export const orderState = `
|
||||
SELECT top(10000)
|
||||
SELECT
|
||||
CustomerOrderNumber
|
||||
,r.CustomerReleaseNumber
|
||||
, OrderState
|
||||
@@ -21,6 +21,6 @@ CustomerOrderNumber
|
||||
|
||||
where
|
||||
--h.CreatedByEdi = 1
|
||||
r.ReleaseState > 0
|
||||
r.ReleaseState >= 1
|
||||
--and CustomerOrderNumber in ( '2358392')
|
||||
`;
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
use [test1_AlplaPROD2.0_Read]
|
||||
|
||||
select
|
||||
humanreadableId as addressId
|
||||
,ContactEmail as email
|
||||
,ContactName
|
||||
,ContactPhoneNumber
|
||||
,Name
|
||||
,Street
|
||||
,City
|
||||
,ZipCode
|
||||
--,*
|
||||
from [masterData].[Address] (nolock)
|
||||
where humanreadableid = [customerAddress]
|
||||
@@ -0,0 +1,43 @@
|
||||
use AlplaPROD_test1
|
||||
|
||||
|
||||
/**
|
||||
check if we have any new alpla bols that were created
|
||||
*/
|
||||
|
||||
SELECT
|
||||
x.idladeplanung
|
||||
,e.idjournal
|
||||
,e.journalNummer as bol
|
||||
,e.idjournalstatus
|
||||
,e.ladeDatum as loadDate
|
||||
,e.bemerkung
|
||||
,e.ereporting_idwerk
|
||||
,e.journalDatum
|
||||
,a.idadressen as addressId
|
||||
,a.bezeichnung as addressDescription
|
||||
,a.strasse as streetAddress
|
||||
,a.ort as cityState
|
||||
,a.plz as zipcode
|
||||
,idauftrag as releaseNumber
|
||||
--,*
|
||||
FROM [dbo].[T_EAIJournal] as e with (nolock)
|
||||
|
||||
|
||||
-- pull in the address so we only pull in florence data
|
||||
left join
|
||||
[dbo].[T_EAIJournalAdresse] as a with (nolock) on
|
||||
a.[IdJournalAdresse] = [IdJournalKundenAdresse]
|
||||
|
||||
-- get the table to link the pallets to the bol
|
||||
left join
|
||||
[dbo].[T_EAIJournalPosition] as x with (nolock) on
|
||||
x.idjournal = e.idjournal
|
||||
|
||||
where idjournalStatus = 62
|
||||
--and idadressen = 270
|
||||
and a.bezeichnung like '%Alpla%' -- we only want to monitor for addresses that are linked to alpla.
|
||||
and JournalDatum > DATEADD(MINUTE, -[timeCheck], GETDATE())
|
||||
and e.journalNummer not in ([ignoreBols])
|
||||
and idauftrag > 1 -- this will ignore all incoming goodsv as we are really only looking for outbound deliveries
|
||||
order by JournalDatum desc
|
||||
@@ -0,0 +1,33 @@
|
||||
/*
|
||||
checks the age of an inventory dose not exceed x time
|
||||
*/
|
||||
|
||||
use AlplaPROD_test1
|
||||
DECLARE @timeCheck INT = [timeTest]
|
||||
|
||||
select
|
||||
w.IdWarenLager as idWarehouse
|
||||
,w.KurzBezeichnung as warehouse
|
||||
,b.IdLagerAbteilung as locationId
|
||||
,x.KurzBezeichnung as 'location'
|
||||
--,case when b.upd_date < Dateadd(minute, -(@timeCheck * 1.5), getdate()) then 'OVERDUE' else 'In-Progress' end as invStatus
|
||||
,format(b.Upd_Date, 'M/d/yyyy HH:mm') as cycleCountStartAt
|
||||
,b.Upd_User as blockedBy
|
||||
|
||||
--,*
|
||||
|
||||
from [dbo].[V_LagerAbteilungenInventuren] (nolock) as b
|
||||
|
||||
-- get the loction name
|
||||
left join
|
||||
dbo.T_LagerAbteilungen (nolock) as x
|
||||
on x.IdLagerAbteilung = b.IdLagerAbteilung
|
||||
|
||||
-- get the whse
|
||||
left join
|
||||
dbo.T_WarenLager (nolock) as w
|
||||
on x.idWarenLager = w.idWarenLager
|
||||
|
||||
|
||||
where status = 1
|
||||
and b.Upd_Date < Dateadd(minute, -@timeCheck, getdate())
|
||||
@@ -0,0 +1,8 @@
|
||||
/*
|
||||
disables sql jobs.
|
||||
*/
|
||||
EXEC msdb.dbo.sp_update_job @job_name = N'[jobName]', @enabled = 0;
|
||||
-- DECLARE @JobName varchar(max) = '[jobName]'
|
||||
-- UPDATE msdb.dbo.sysjobs
|
||||
-- SET enabled = 0
|
||||
-- WHERE name = @JobName;
|
||||
@@ -0,0 +1,78 @@
|
||||
/*
|
||||
This query will return a single running number as long as its in stock.
|
||||
|
||||
To get all data comment out the lfdnr in the where statmen
|
||||
*/
|
||||
use AlplaPROD_test1
|
||||
|
||||
DECLARE @runningNumber nvarchar(max) = '[runningNr]' -- when saving in lst should be '[runningNr]'
|
||||
|
||||
select x.idartikelVarianten as av,
|
||||
ArtikelVariantenAlias as alias,
|
||||
x.Lfdnr as runningNumber,
|
||||
round(sum(EinlagerungsMengeVPKSum),0) as totalPallets,
|
||||
sum(EinlagerungsMengeSum) as totalPalletQTY,
|
||||
round(sum(VerfuegbareMengeVPKSum),0) as avaliblePallets,
|
||||
sum(VerfuegbareMengeSum) as avaliablePalletQTY,
|
||||
sum(case when c.Description LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as coaPallets,
|
||||
sum(case when c.Description LIKE '%COA%' then GesperrteMengeSum else 0 end) as coaQTY,
|
||||
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeVPKSum else 0 end) as heldPallets,
|
||||
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeSum else 0 end) as heldQTY
|
||||
,IdProdPlanung as lot
|
||||
,IdAdressen as addressID,
|
||||
x.AdressBez as addressDescription
|
||||
,x.IdLagerAbteilung as locationId
|
||||
,x.lagerabteilungkurzbez as location
|
||||
,lot.machine
|
||||
,produktionsdatummin as productionDate
|
||||
,'728'
|
||||
+ RIGHT(CAST(YEAR(produktionsdatummin) AS varchar(4)), 1)
|
||||
+ CAST(DATEDIFF(DAY, DATEFROMPARTS(YEAR(produktionsdatummin), 1, 1), produktionsdatummin) + 1 AS varchar(3))
|
||||
+ CAST(lot.machine AS varchar(10)) as batch
|
||||
,c.Description as blockingReason
|
||||
,x.Barcode as barcode
|
||||
--,*
|
||||
from dbo.[V_LagerPositionenBarcodes] (nolock) x
|
||||
|
||||
left join
|
||||
dbo.T_EtikettenGedruckt as l(nolock) on
|
||||
x.Lfdnr = l.Lfdnr AND l.Lfdnr > 1
|
||||
|
||||
left join
|
||||
|
||||
(SELECT *
|
||||
FROM [dbo].[T_BlockingDefects] where Active = 1) as c
|
||||
on x.IdMainDefect = c.IdBlockingDefect
|
||||
|
||||
/*
|
||||
get lot and machine info
|
||||
*/
|
||||
left join
|
||||
(select location as machine,
|
||||
runningnumber as lot
|
||||
,planstart
|
||||
,planend
|
||||
from [test1_AlplaPROD2.0_Read].[productionScheduling].[ProductionLot] (nolock) x
|
||||
|
||||
left join
|
||||
[test1_AlplaPROD2.0_Read].[masterData].[Machine] (nolock) m on
|
||||
m.id = x.machineid) as lot on
|
||||
lot.lot = IdProdPlanung
|
||||
/*
|
||||
The data below will be controlled by the user in excel by default everything will be passed over
|
||||
IdAdressen = 3
|
||||
*/
|
||||
where IdArtikelTyp = 1
|
||||
and x.IdWarenlager in (1) -- the pallet must be in ppoo
|
||||
and x.Lfdnr = @runningNumber -- comment this out when you want to get everything
|
||||
|
||||
group by x.idartikelVarianten, ArtikelVariantenAlias, c.Description, IdAdressen,
|
||||
x.AdressBez , x.Lfdnr,
|
||||
IdProdPlanung
|
||||
,x.IdLagerAbteilung
|
||||
,x.lagerabteilungkurzbez
|
||||
,lot.machine
|
||||
,produktionsdatummin
|
||||
,x.Barcode
|
||||
|
||||
order by x.IdArtikelVarianten
|
||||
@@ -0,0 +1,37 @@
|
||||
use AlplaPROD_test1
|
||||
|
||||
select * from (SELECT
|
||||
p.[IdLadePlanung]
|
||||
,p.[Beleg] as lotNumber
|
||||
,p.[LfdNrJeArtikelKunde] as runningNr
|
||||
,p.[Barcode]
|
||||
,p.[ProduktionsDatum] as productionDate
|
||||
,p.[Add_User] as scanDate
|
||||
,p.[Add_Date]
|
||||
,p.[Upd_User]
|
||||
,p.[Upd_Date]
|
||||
,p.[IdJournalWarenPosition]
|
||||
,p.[LieferMenge] as qty
|
||||
-- ,av.IdArtikelvarianten as article
|
||||
-- ,av.Bezeichnung as alias
|
||||
,av.articlehumanreadableid as article
|
||||
,av.ArticleDescription as alias
|
||||
--,[SSCC_ReserveZiffer]
|
||||
--,ROW_NUMBER() OVER (PARTITION BY p.[LfdNrJeArtikelKunde] ORDER BY p.upd_date DESC) AS RowNum
|
||||
--,*
|
||||
|
||||
FROM [dbo].[T_EAIJournalLieferPosition] as p (nolock)
|
||||
|
||||
-- left join
|
||||
-- dbo.T_ProdPlanung as l on
|
||||
-- l.IdProdPlanung = p.Beleg
|
||||
|
||||
left join
|
||||
[test1_AlplaPROD2.0_Read].labelling.InternalLabel as av on
|
||||
av.RunningNumber = p.[LfdNrJeArtikelKunde]
|
||||
) as a
|
||||
|
||||
where idladeplanung in ([palLinkedToBol])
|
||||
--and RowNum = 1
|
||||
|
||||
order by runningNr
|
||||
@@ -157,3 +157,259 @@ where lot.ProductionLotHumanReadableId = @lot and MaterialDescription not like '
|
||||
and MaterialDescription NOT LIKE '%bb%'
|
||||
and MaterialDescription NOT LIKE '%mcg%'
|
||||
`;
|
||||
|
||||
export const something = [
|
||||
{
|
||||
MaterialHumanReadableId: 98,
|
||||
MaterialDescription: "BAN Banding 51544 1cyc",
|
||||
Staged: 0,
|
||||
isManual: false,
|
||||
IsMainMaterial: false,
|
||||
TotalPlannedLoadingUnits: 88,
|
||||
TotalProducedLoadingUnits: 0,
|
||||
remainingPallets: 88,
|
||||
Provided: 0,
|
||||
consumption: 0,
|
||||
totalDemand: 352,
|
||||
totalNeeded: 66.2904,
|
||||
noMMShortage: null,
|
||||
noPKGAutoShortage: "pkgAutoGood",
|
||||
noPKGManualShortage: "noManPkg",
|
||||
noManualShortage: null,
|
||||
autoConsumeCheck: "autoConsumeOk",
|
||||
invForAutoConsume: 1314200,
|
||||
Percentage: 0,
|
||||
QuantityPosition: 4,
|
||||
"": null,
|
||||
},
|
||||
{
|
||||
MaterialHumanReadableId: 174,
|
||||
MaterialDescription: "MB PE Ampacet BW 11744",
|
||||
Staged: 0,
|
||||
isManual: true,
|
||||
IsMainMaterial: false,
|
||||
TotalPlannedLoadingUnits: 88,
|
||||
TotalProducedLoadingUnits: 0,
|
||||
remainingPallets: 88,
|
||||
Provided: 0,
|
||||
consumption: 0,
|
||||
totalDemand: 162.728623,
|
||||
totalNeeded: 1.814699,
|
||||
noMMShortage: null,
|
||||
noPKGAutoShortage: null,
|
||||
noPKGManualShortage: null,
|
||||
noManualShortage: "noOK",
|
||||
autoConsumeCheck: "autoConsumeNOK",
|
||||
invForAutoConsume: null,
|
||||
Percentage: 2.19,
|
||||
QuantityPosition: null,
|
||||
"": 0,
|
||||
},
|
||||
{
|
||||
MaterialHumanReadableId: 99,
|
||||
MaterialDescription: "TOP Plastic 6040643 44x56x4 w/o CB 30cyc",
|
||||
Staged: 0,
|
||||
isManual: false,
|
||||
IsMainMaterial: false,
|
||||
TotalPlannedLoadingUnits: 88,
|
||||
TotalProducedLoadingUnits: 0,
|
||||
remainingPallets: 88,
|
||||
Provided: 0,
|
||||
consumption: 0,
|
||||
totalDemand: 88,
|
||||
totalNeeded: 66.2904,
|
||||
noMMShortage: null,
|
||||
noPKGAutoShortage: "pkgAutoGood",
|
||||
noPKGManualShortage: "noManPkg",
|
||||
noManualShortage: null,
|
||||
autoConsumeCheck: "autoConsumeOk",
|
||||
invForAutoConsume: 2048,
|
||||
Percentage: 0,
|
||||
QuantityPosition: 1,
|
||||
"": null,
|
||||
},
|
||||
{
|
||||
MaterialHumanReadableId: 119,
|
||||
MaterialDescription: "MM HDPE PCR KW Plastics KWR 101-150",
|
||||
Staged: 1,
|
||||
isManual: false,
|
||||
IsMainMaterial: false,
|
||||
TotalPlannedLoadingUnits: 88,
|
||||
TotalProducedLoadingUnits: 0,
|
||||
remainingPallets: 88,
|
||||
Provided: 53643.717,
|
||||
consumption: 0,
|
||||
totalDemand: 3744.977905,
|
||||
totalNeeded: 41.762952,
|
||||
noMMShortage: null,
|
||||
noPKGAutoShortage: null,
|
||||
noPKGManualShortage: null,
|
||||
noManualShortage: null,
|
||||
autoConsumeCheck: "autoConsumeOk",
|
||||
invForAutoConsume: 53754.112,
|
||||
Percentage: 50.4,
|
||||
QuantityPosition: null,
|
||||
"": 0,
|
||||
},
|
||||
{
|
||||
MaterialHumanReadableId: 504,
|
||||
MaterialDescription: "LBL IML Label F 1.8L Evolution 1265677",
|
||||
Staged: 0,
|
||||
isManual: false,
|
||||
IsMainMaterial: false,
|
||||
TotalPlannedLoadingUnits: 88,
|
||||
TotalProducedLoadingUnits: 0,
|
||||
remainingPallets: 88,
|
||||
Provided: 0,
|
||||
consumption: 0,
|
||||
totalDemand: 79357.090909,
|
||||
totalNeeded: 1,
|
||||
noMMShortage: null,
|
||||
noPKGAutoShortage: null,
|
||||
noPKGManualShortage: null,
|
||||
noManualShortage: null,
|
||||
autoConsumeCheck: "autoConsumeOk",
|
||||
invForAutoConsume: 900500,
|
||||
Percentage: 0,
|
||||
QuantityPosition: null,
|
||||
"": 1,
|
||||
},
|
||||
{
|
||||
MaterialHumanReadableId: 176,
|
||||
MaterialDescription: "MM HDPE Dow DMDF 6230",
|
||||
Staged: 1,
|
||||
isManual: false,
|
||||
IsMainMaterial: true,
|
||||
TotalPlannedLoadingUnits: 88,
|
||||
TotalProducedLoadingUnits: 0,
|
||||
remainingPallets: 88,
|
||||
Provided: 74063.734,
|
||||
consumption: 0,
|
||||
totalDemand: 3522.805744,
|
||||
totalNeeded: 39.285348,
|
||||
noMMShortage: "mmGood",
|
||||
noPKGAutoShortage: null,
|
||||
noPKGManualShortage: null,
|
||||
noManualShortage: null,
|
||||
autoConsumeCheck: "autoConsumeOk",
|
||||
invForAutoConsume: 182624.771,
|
||||
Percentage: 47.41,
|
||||
QuantityPosition: null,
|
||||
"": 0,
|
||||
},
|
||||
{
|
||||
MaterialHumanReadableId: 397,
|
||||
MaterialDescription: "STW Film 20x45ga 180567",
|
||||
Staged: 0,
|
||||
isManual: false,
|
||||
IsMainMaterial: false,
|
||||
TotalPlannedLoadingUnits: 88,
|
||||
TotalProducedLoadingUnits: 0,
|
||||
remainingPallets: 88,
|
||||
Provided: 0,
|
||||
consumption: 0,
|
||||
totalDemand: 17.6,
|
||||
totalNeeded: 66.2904,
|
||||
noMMShortage: null,
|
||||
noPKGAutoShortage: "pkgAutoGood",
|
||||
noPKGManualShortage: "noManPkg",
|
||||
noManualShortage: null,
|
||||
autoConsumeCheck: "autoConsumeOk",
|
||||
invForAutoConsume: 1063.92,
|
||||
Percentage: 0,
|
||||
QuantityPosition: 0.2,
|
||||
"": null,
|
||||
},
|
||||
{
|
||||
MaterialHumanReadableId: 96,
|
||||
MaterialDescription: "PAL PRA 44x56x5 50cyc",
|
||||
Staged: 0,
|
||||
isManual: false,
|
||||
IsMainMaterial: false,
|
||||
TotalPlannedLoadingUnits: 88,
|
||||
TotalProducedLoadingUnits: 0,
|
||||
remainingPallets: 88,
|
||||
Provided: 0,
|
||||
consumption: 0,
|
||||
totalDemand: 88,
|
||||
totalNeeded: 66.2904,
|
||||
noMMShortage: null,
|
||||
noPKGAutoShortage: "pkgAutoGood",
|
||||
noPKGManualShortage: "noManPkg",
|
||||
noManualShortage: null,
|
||||
autoConsumeCheck: "autoConsumeOk",
|
||||
invForAutoConsume: 1529,
|
||||
Percentage: 0,
|
||||
QuantityPosition: 1,
|
||||
"": null,
|
||||
},
|
||||
{
|
||||
MaterialHumanReadableId: 505,
|
||||
MaterialDescription: "LBL IML Label B 1.8L Evolution 1265678",
|
||||
Staged: 0,
|
||||
isManual: false,
|
||||
IsMainMaterial: false,
|
||||
TotalPlannedLoadingUnits: 88,
|
||||
TotalProducedLoadingUnits: 0,
|
||||
remainingPallets: 88,
|
||||
Provided: 0,
|
||||
consumption: 0,
|
||||
totalDemand: 79357.090909,
|
||||
totalNeeded: 1,
|
||||
noMMShortage: null,
|
||||
noPKGAutoShortage: null,
|
||||
noPKGManualShortage: null,
|
||||
noManualShortage: null,
|
||||
autoConsumeCheck: "autoConsumeOk",
|
||||
invForAutoConsume: 903000,
|
||||
Percentage: 0,
|
||||
QuantityPosition: null,
|
||||
"": 1,
|
||||
},
|
||||
{
|
||||
MaterialHumanReadableId: 97,
|
||||
MaterialDescription: "SSH Plastic 48100349 44x56x0.06 30cyc",
|
||||
Staged: 0,
|
||||
isManual: false,
|
||||
IsMainMaterial: false,
|
||||
TotalPlannedLoadingUnits: 88,
|
||||
TotalProducedLoadingUnits: 0,
|
||||
remainingPallets: 88,
|
||||
Provided: 0,
|
||||
consumption: 0,
|
||||
totalDemand: 792,
|
||||
totalNeeded: 66.2904,
|
||||
noMMShortage: null,
|
||||
noPKGAutoShortage: "pkgAutoGood",
|
||||
noPKGManualShortage: "noManPkg",
|
||||
noManualShortage: null,
|
||||
autoConsumeCheck: "autoConsumeOk",
|
||||
invForAutoConsume: 29962,
|
||||
Percentage: 0,
|
||||
QuantityPosition: 9,
|
||||
"": null,
|
||||
},
|
||||
{
|
||||
MaterialHumanReadableId: 169,
|
||||
MaterialDescription: "LBL Label 4x6 white 9396",
|
||||
Staged: 0,
|
||||
isManual: false,
|
||||
IsMainMaterial: false,
|
||||
TotalPlannedLoadingUnits: 88,
|
||||
TotalProducedLoadingUnits: 0,
|
||||
remainingPallets: 88,
|
||||
Provided: 0,
|
||||
consumption: 0,
|
||||
totalDemand: 264,
|
||||
totalNeeded: 66.2904,
|
||||
noMMShortage: null,
|
||||
noPKGAutoShortage: "pkgAutoGood",
|
||||
noPKGManualShortage: "noManPkg",
|
||||
noManualShortage: null,
|
||||
autoConsumeCheck: "autoConsumeOk",
|
||||
invForAutoConsume: 55637,
|
||||
Percentage: 0,
|
||||
QuantityPosition: 3,
|
||||
"": null,
|
||||
},
|
||||
];
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export const forecastData = `
|
||||
SELECT format(cast(RequirementDate as date),'M/d/yyyy') as requirementDate
|
||||
SELECT RequirementDate as requirementDate
|
||||
,ArticleHumanReadableId
|
||||
,CustomerArticleNumber
|
||||
,ArticleDescription
|
||||
|
||||
@@ -1,34 +1,34 @@
|
||||
export const planningNumbersByAVDate = `
|
||||
use AlplaPROD_test1
|
||||
declare @start_date nvarchar(30) = '[startDate]' --'2025-01-01'
|
||||
declare @end_date nvarchar(30) = '[endDate]' --'2025-08-09'
|
||||
/*
|
||||
articles will need to be passed over as well as the date structure we want to see
|
||||
*/
|
||||
use AlplaPROD_test1
|
||||
declare @start_date nvarchar(30) = '[startDate]' --'2025-01-01'
|
||||
declare @end_date nvarchar(30) = '[endDate]' --'2025-08-09'
|
||||
/*
|
||||
articles will need to be passed over as well as the date structure we want to see
|
||||
*/
|
||||
|
||||
select x.IdArtikelvarianten As Article,
|
||||
ProduktionAlias as Description,
|
||||
standort as MachineId,
|
||||
MaschinenBezeichnung as MachineName,
|
||||
--MaschZyklus as PlanningCycleTime,
|
||||
x.IdProdPlanung as LotNumber,
|
||||
FORMAT(ProdTag, 'MM/dd/yyyy') as ProductionDay,
|
||||
x.planMenge as TotalPlanned,
|
||||
ProduktionMenge as QTYPerDay,
|
||||
round(ProduktionMengeVPK, 2) PalDay,
|
||||
Status as finished
|
||||
--MaschStdAuslastung as nee
|
||||
select x.IdArtikelvarianten As Article,
|
||||
ProduktionAlias as Description,
|
||||
standort as MachineId,
|
||||
MaschinenBezeichnung as MachineName,
|
||||
--MaschZyklus as PlanningCycleTime,
|
||||
x.IdProdPlanung as LotNumber,
|
||||
FORMAT(ProdTag, 'MM/dd/yyyy') as ProductionDay,
|
||||
x.planMenge as TotalPlanned,
|
||||
ProduktionMenge as QTYPerDay,
|
||||
round(ProduktionMengeVPK, 2) PalDay,
|
||||
Status as finished
|
||||
--MaschStdAuslastung as nee
|
||||
|
||||
from dbo.V_ProdLosProduktionJeProdTag_PLANNING (nolock) as x
|
||||
from dbo.V_ProdLosProduktionJeProdTag_PLANNING (nolock) as x
|
||||
|
||||
left join
|
||||
dbo.V_ProdPlanung (nolock) as p on
|
||||
x.IdProdPlanung = p.IdProdPlanung
|
||||
left join
|
||||
dbo.V_ProdPlanung (nolock) as p on
|
||||
x.IdProdPlanung = p.IdProdPlanung
|
||||
|
||||
where ProdTag between @start_date and @end_date
|
||||
and p.IdArtikelvarianten in ([articles])
|
||||
--and V_ProdLosProduktionJeProdTag_PLANNING.IdKunde = 10
|
||||
--and IdProdPlanung = 18442
|
||||
where ProdTag between @start_date and @end_date
|
||||
and p.IdArtikelvarianten in ([articles])
|
||||
--and V_ProdLosProduktionJeProdTag_PLANNING.IdKunde = 10
|
||||
--and IdProdPlanung = 18442
|
||||
|
||||
order by ProdTag desc
|
||||
order by ProdTag desc
|
||||
`;
|
||||
|
||||
@@ -29,7 +29,7 @@ left join
|
||||
alplaprod_test1.dbo.V_LagerPositionenBarcodes (nolock) as l on
|
||||
ext.RunningNumber = l.Lfdnr
|
||||
|
||||
WHERE ext.SsccEanRunningNumber IN (@runningNumber) and
|
||||
WHERE ext.RunningNumber IN (@runningNumber) and
|
||||
ext.RunningNumber NOT IN (
|
||||
SELECT RunningNumber FROM [test1_AlplaPROD2.0_Read].[labelling].[InternalLabel] WHERE RunningNumber IN (@runningNumber)
|
||||
)
|
||||
|
||||
@@ -4,48 +4,47 @@ import { createLog } from "../../logger/logger.js";
|
||||
import { serverSettings } from "../../server/controller/settings/getSettings.js";
|
||||
|
||||
export const prodSqlConfig = async () => {
|
||||
try {
|
||||
//const serverSetting = await db.select().from(settings);
|
||||
const serverSetting = serverSettings as any;
|
||||
// create dummy type data
|
||||
const server = serverSetting.filter((s: any) => s.name === "dbServer");
|
||||
const plantToken = serverSetting.filter(
|
||||
(s: any) => s.name === "plantToken"
|
||||
);
|
||||
const dbUser = serverSetting.filter((s: any) => s.name === "dbUser");
|
||||
// if erroring out double check the password was actually encoded before saving
|
||||
const dbPassword = serverSetting.filter(
|
||||
(s: any) => s.name === "dbPass"
|
||||
);
|
||||
try {
|
||||
//const serverSetting = await db.select().from(settings);
|
||||
const serverSetting = serverSettings as any;
|
||||
// create dummy type data
|
||||
const server = serverSetting.filter((s: any) => s.name === "dbServer");
|
||||
const plantToken = serverSetting.filter(
|
||||
(s: any) => s.name === "plantToken",
|
||||
);
|
||||
const dbUser = serverSetting.filter((s: any) => s.name === "dbUser");
|
||||
// if erroring out double check the password was actually encoded before saving
|
||||
const dbPassword = serverSetting.filter((s: any) => s.name === "dbPass");
|
||||
|
||||
const sqlConfig = {
|
||||
server: server[0].value,
|
||||
database: `AlplaPROD_${plantToken[0].value}_cus`,
|
||||
user: dbUser[0].value,
|
||||
password: atob(dbPassword[0].value),
|
||||
options: {
|
||||
encrypt: true,
|
||||
trustServerCertificate: true,
|
||||
},
|
||||
requestTimeout: 90000, // in milliseconds
|
||||
pool: {
|
||||
max: 20, // Maximum number of connections in the pool
|
||||
min: 0, // Minimum number of connections in the pool
|
||||
idleTimeoutMillis: 10000, // How long a connection is allowed to be idle before being released
|
||||
reapIntervalMillis: 1000, // how often to check for idle resourses to destory
|
||||
acquireTimeoutMillis: 100000, // How long until a complete timeout happens
|
||||
},
|
||||
};
|
||||
const sqlConfig = {
|
||||
server:
|
||||
process.env.NODE_ENV !== "development" ? "localhost" : server[0].value,
|
||||
database: `AlplaPROD_${plantToken[0].value}_cus`,
|
||||
user: dbUser[0].value,
|
||||
password: atob(dbPassword[0].value),
|
||||
options: {
|
||||
encrypt: true,
|
||||
trustServerCertificate: true,
|
||||
},
|
||||
requestTimeout: 90000, // in milliseconds
|
||||
pool: {
|
||||
max: 20, // Maximum number of connections in the pool
|
||||
min: 0, // Minimum number of connections in the pool
|
||||
idleTimeoutMillis: 10000, // How long a connection is allowed to be idle before being released
|
||||
reapIntervalMillis: 1000, // how often to check for idle resourses to destory
|
||||
acquireTimeoutMillis: 100000, // How long until a complete timeout happens
|
||||
},
|
||||
};
|
||||
|
||||
return sqlConfig;
|
||||
} catch (error) {
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
`${JSON.stringify(
|
||||
error
|
||||
)} "There was an error getting/setting up the config for the prod sql server."`
|
||||
);
|
||||
}
|
||||
return sqlConfig;
|
||||
} catch (error) {
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"sqlProd",
|
||||
`${JSON.stringify(
|
||||
error,
|
||||
)} "There was an error getting/setting up the config for the prod sql server."`,
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
28
lstV2/server/services/sqlServer/utils/querySelector.utils.ts
Normal file
28
lstV2/server/services/sqlServer/utils/querySelector.utils.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { readFileSync } from "fs";
|
||||
|
||||
export type SqlQuery = {
|
||||
query: string;
|
||||
success: boolean;
|
||||
message: string;
|
||||
};
|
||||
|
||||
export const sqlQuerySelector = (name: string) => {
|
||||
try {
|
||||
const queryFile = readFileSync(
|
||||
new URL(`../querys/newQueries/${name}.sql`, import.meta.url),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Query for: ${name}`,
|
||||
query: queryFile,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
"Error getting the query file, please make sure you have the correct name.",
|
||||
};
|
||||
}
|
||||
};
|
||||
@@ -10,7 +10,8 @@
|
||||
"dev:front": "cd frontend && npm run dev",
|
||||
"dev:db:migrate": "npx drizzle-kit push",
|
||||
"dev:db:generate": "tsc && npx drizzle-kit generate --config=drizzle-dev.config.ts",
|
||||
"dev": "concurrently -n \"server,frontend,docs,oldServer\" -c \"#007755,#2f6da3,#DB4FE0, #1F73D1\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\" \"npm run dev:old\"",
|
||||
"dev": "concurrently -n \"server,oldServer\" -c \"#007755, #1F73D1\" \"npm run dev:app\" \"npm run dev:old\"",
|
||||
"dev:all": "concurrently -n \"server,frontend,docs,oldServer\" -c \"#007755,#2f6da3,#DB4FE0, #1F73D1\" \"npm run dev:app\" \"npm run dev:front\" \"npm run dev:docs\" \"npm run dev:old\"",
|
||||
"dev:old": "cd lstV2 && npm run dev",
|
||||
"copy:docs": "node scripts/lstDocCopy.mjs",
|
||||
"build:app": "rimraf dist && npx tsc && node scripts/lstAppMoves.mjs",
|
||||
|
||||
112
scripts/FinanceProcess/bookInPalletsToLot.mjs
Normal file
112
scripts/FinanceProcess/bookInPalletsToLot.mjs
Normal file
@@ -0,0 +1,112 @@
|
||||
import net from "net";
|
||||
|
||||
/**
|
||||
* This uses a kinda fake scanner to mimic the scanning process to a server and creates the bol.
|
||||
*/
|
||||
const scannerID = "98@";
|
||||
const scannerCommand = "Alplaprodcmd10"; // to consume all the pallets
|
||||
const lot = "AlplaPRODchg#00000016700"; // to consume to the lot make sure its showing in 2.0 to be able to consume to it
|
||||
|
||||
const labels = [
|
||||
"1000000000000000000000000000000005512460",
|
||||
"1000000000000000000000000000000005512470",
|
||||
"1000000000000000000000000000000005512480",
|
||||
"1000000000000000000000000000000005512490",
|
||||
"1000000000000000000000000000000005512500",
|
||||
"1000000000000000000000000000000005512510",
|
||||
"1000000000000000000000000000000005512520",
|
||||
"1000000000000000000000000000000005512530",
|
||||
"1000000000000000000000000000000005512540",
|
||||
"1000000000000000000000000000000005512550",
|
||||
"1000000000000000000000000000000005512560",
|
||||
"1000000000000000000000000000000005512570",
|
||||
"1000000000000000000000000000000005512580",
|
||||
"1000000000000000000000000000000005512590",
|
||||
"1000000000000000000000000000000005512600",
|
||||
"1000000000000000000000000000000005512610",
|
||||
"1000000000000000000000000000000005512620",
|
||||
"1000000000000000000000000000000005512630",
|
||||
"1000000000000000000000000000000005512640",
|
||||
"1000000000000000000000000000000005512650",
|
||||
"1000000000000000000000000000000005512660",
|
||||
"1000000000000000000000000000000005512670",
|
||||
"1000000000000000000000000000000005512680",
|
||||
"1000000000000000000000000000000005512690",
|
||||
"1000000000000000000000000000000005512700",
|
||||
"1000000000000000000000000000000005512710",
|
||||
"1000000000000000000000000000000005512720",
|
||||
"1000000000000000000000000000000005512730",
|
||||
"1000000000000000000000000000000005512740",
|
||||
"1000000000000000000000000000000005512750",
|
||||
"1000000000000000000000000000000005512760",
|
||||
"1000000000000000000000000000000005512770",
|
||||
"1000000000000000000000000000000005512780",
|
||||
"1000000000000000000000000000000005512790",
|
||||
"1000000000000000000000000000000005512800",
|
||||
"1000000000000000000000000000000005512810",
|
||||
"1000000000000000000000000000000005512820",
|
||||
"1000000000000000000000000000000005512830",
|
||||
"1000000000000000000000000000000005512840",
|
||||
"1000000000000000000000000000000005512850",
|
||||
"1000000000000000000000000000000005512860",
|
||||
"1000000000000000000000000000000005512870",
|
||||
"1000000000000000000000000000000005512880",
|
||||
"1000000000000000000000000000000005512890",
|
||||
"1000000000000000000000000000000005512900",
|
||||
"1000000000000000000000000000000005512910",
|
||||
"1000000000000000000000000000000005512920",
|
||||
"1000000000000000000000000000000005512930",
|
||||
"1000000000000000000000000000000005512940",
|
||||
"1000000000000000000000000000000005512950",
|
||||
"1000000000000000000000000000000005512960",
|
||||
];
|
||||
const STX = "\x02";
|
||||
const ETX = "\x03";
|
||||
|
||||
const scanner = new net.Socket();
|
||||
|
||||
scanner.connect(50000, "10.204.0.26", async () => {
|
||||
console.log("Connected to scanner");
|
||||
|
||||
// change the scanner to the to 112
|
||||
let message = Buffer.from(
|
||||
`${STX}${scannerID}${scannerCommand}${ETX}`,
|
||||
"ascii",
|
||||
);
|
||||
console.log("Sending:", message.toString("ascii"));
|
||||
scanner.write(message);
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||
|
||||
// bookin all the pallets in the array
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||
for (let i = 0; i < labels.length; i++) {
|
||||
const l = labels[i];
|
||||
|
||||
message = Buffer.from(`${STX}${scannerID}${l}${ETX}`, "ascii");
|
||||
console.log("Sending:", message.toString("ascii"));
|
||||
scanner.write(message);
|
||||
await new Promise((resolve) => setTimeout(resolve, 1200));
|
||||
}
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 1500));
|
||||
scanner.destroy();
|
||||
});
|
||||
|
||||
scanner.on("data", async (data) => {
|
||||
console.log(
|
||||
"Response:",
|
||||
data
|
||||
.toString("ascii")
|
||||
.replace(/\x00/g, "") // remove null bytes
|
||||
.replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
|
||||
.trim(),
|
||||
);
|
||||
});
|
||||
|
||||
scanner.on("close", () => {
|
||||
console.log("Connection closed");
|
||||
});
|
||||
|
||||
scanner.on("error", (err) => {
|
||||
console.error("Scanner error:", err);
|
||||
});
|
||||
100
scripts/FinanceProcess/consumePalletsToLot copy.mjs
Normal file
100
scripts/FinanceProcess/consumePalletsToLot copy.mjs
Normal file
@@ -0,0 +1,100 @@
|
||||
import net from "net";
|
||||
|
||||
/**
|
||||
* This uses a kinda fake scanner to mimic the scanning process to a server and creates the bol.
|
||||
*/
|
||||
const scannerID = "98@";
|
||||
const scannerCommand = "Alplaprodcmd112"; // to consume all the pallets
|
||||
const lot = "AlplaPRODchg#11601"; // to consume to the lot make sure its showing in 2.0 to be able to consume to it
|
||||
|
||||
const labels = [
|
||||
"1000000000000000000000000000000004551860",
|
||||
"1000000000000000000000000000000004551640",
|
||||
"1000000000000000000000000000000004551840",
|
||||
"1000000000000000000000000000000004551610",
|
||||
"1000000000000000000000000000000004551720",
|
||||
"1000000000000000000000000000000004551680",
|
||||
"1000000000000000000000000000000004551740",
|
||||
"1000000000000000000000000000000004551660",
|
||||
"1000000000000000000000000000000004551570",
|
||||
"1000000000000000000000000000000004551480",
|
||||
"1000000000000000000000000000000004551510",
|
||||
"1000000000000000000000000000000004551460",
|
||||
"1000000000000000000000000000000004551600",
|
||||
"1000000000000000000000000000000004551340",
|
||||
"1000000000000000000000000000000004551580",
|
||||
"1000000000000000000000000000000004551330",
|
||||
"1000000000000000000000000000000004551290",
|
||||
"1000000000000000000000000000000004551180",
|
||||
"1000000000000000000000000000000004551260",
|
||||
"1000000000000000000000000000000004551150",
|
||||
"1000000000000000000000000000000004551390",
|
||||
"1000000000000000000000000000000004551440",
|
||||
"1000000000000000000000000000000004551360",
|
||||
"1000000000000000000000000000000004551400",
|
||||
"1000000000000000000000000000000004544780",
|
||||
"1000000000000000000000000000000004551230",
|
||||
"1000000000000000000000000000000004544770",
|
||||
"1000000000000000000000000000000004551200",
|
||||
"1000000000000000000000000000000004544850",
|
||||
"1000000000000000000000000000000004548370",
|
||||
"1000000000000000000000000000000004544840",
|
||||
"1000000000000000000000000000000004548470",
|
||||
"1000000000000000000000000000000004611380",
|
||||
"1000000000000000000000000000000004611470",
|
||||
"1000000000000000000000000000000004611440",
|
||||
];
|
||||
const STX = "\x02";
|
||||
const ETX = "\x03";
|
||||
|
||||
const scanner = new net.Socket();
|
||||
|
||||
// Drive the scanner session: switch it to the target command, then replay
// every pallet label (lot trigger + label) at a human-like pace.
scanner.connect(50001, "10.80.0.26", async () => {
  console.log("Connected to scanner");

  // Local helpers: pause for `ms`, and frame + log + send one payload.
  const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  const send = (payload) => {
    const frame = Buffer.from(`${STX}${scannerID}${payload}${ETX}`, "ascii");
    console.log("Sending:", frame.toString("ascii"));
    scanner.write(frame);
  };

  // Change the scanner over to the configured command first.
  send(scannerCommand);
  await delay(2000);

  // Consume all the pallets in the array.
  // NOTE(review): this second 2s pause duplicates the one above — looks
  // copy-pasted; kept as-is to preserve the original timing.
  await delay(2000);
  for (const label of labels) {
    send(lot); // trigger the lot before each pallet
    send(label); // then the pallet label itself
    await delay(1200);
  }

  await delay(1500);
  scanner.destroy();
});
|
||||
|
||||
// Log every frame the scanner sends back, stripped of transport noise.
// Fix: the handler was declared `async` but never awaited anything, so the
// keyword only produced an ignored Promise — dropped it.
scanner.on("data", (data) => {
  const text = data
    .toString("ascii")
    .replace(/\x00/g, "") // remove NUL padding bytes
    .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // strip ANSI escape sequences
    .trim();
  console.log("Response:", text);
});
|
||||
|
||||
// Report when the TCP session to the scanner ends.
scanner.on("close", function () {
  console.log("Connection closed");
});
|
||||
|
||||
// Surface socket-level failures (connection refused, resets, timeouts).
scanner.on("error", function (err) {
  console.error("Scanner error:", err);
});
|
||||
187
scripts/FinanceProcess/createBol.mjs
Normal file
187
scripts/FinanceProcess/createBol.mjs
Normal file
@@ -0,0 +1,187 @@
|
||||
import net from "net";
|
||||
|
||||
/**
 * Emulates a barcode scanner over TCP, replaying the scanning process
 * against the server so that it creates the BOL (bill of lading).
 */
// Production scanner endpoint.
// NOTE(review): hard-coded PROD address/port — confirm before running.
const prodIP = "10.204.0.26";
const prodPort = 50000;
// Prefix every frame carries so the server can attribute the scan.
const scannerID = "98@";
const scannerCommand = "AlplaPRODcmd00000042#000028643"; // top of the picksheet
const scannerCommand2 = ""; // bottom of the pick sheet (currently empty/unused)
|
||||
|
||||
// Pallet labels to replay, in scan order. Each is a fixed-width
// zero-padded 40-character code — presumably SSCC-style pallet IDs;
// verify against the label printer output before reuse.
const labels = [
  "1000000000000000000000000000000005572620",
  "1000000000000000000000000000000005572630",
  "1000000000000000000000000000000005572640",
  "1000000000000000000000000000000005572650",
  "1000000000000000000000000000000005572660",
  "1000000000000000000000000000000005572670",
  "1000000000000000000000000000000005572680",
  "1000000000000000000000000000000005572690",
  "1000000000000000000000000000000005572700",
  "1000000000000000000000000000000005572710",
  "1000000000000000000000000000000005572720",
  "1000000000000000000000000000000005572730",
  "1000000000000000000000000000000005572740",
  "1000000000000000000000000000000005572750",
  "1000000000000000000000000000000005572760",
  "1000000000000000000000000000000005572770",
  "1000000000000000000000000000000005572780",
  "1000000000000000000000000000000005572790",
  "1000000000000000000000000000000005572800",
  "1000000000000000000000000000000005572810",
  "1000000000000000000000000000000005572820",
  "1000000000000000000000000000000005572830",
  "1000000000000000000000000000000005572840",
  "1000000000000000000000000000000005572850",
  "1000000000000000000000000000000005572860",
  "1000000000000000000000000000000005572870",
  "1000000000000000000000000000000005572880",
  "1000000000000000000000000000000005572890",
  "1000000000000000000000000000000005572900",
  "1000000000000000000000000000000005572910",
  "1000000000000000000000000000000005573226",
  "1000000000000000000000000000000005573236",
  "1000000000000000000000000000000005573246",
  "1000000000000000000000000000000005573256",
  "1000000000000000000000000000000005573266",
  "1000000000000000000000000000000005573276",
  "1000000000000000000000000000000005573286",
  "1000000000000000000000000000000005573296",
  "1000000000000000000000000000000005573306",
  "1000000000000000000000000000000005573316",
  "1000000000000000000000000000000005573326",
  "1000000000000000000000000000000005573336",
  "1000000000000000000000000000000005573346",
  "1000000000000000000000000000000005573356",
  "1000000000000000000000000000000005573366",
  "1000000000000000000000000000000005573376",
  "1000000000000000000000000000000005573386",
  "1000000000000000000000000000000005573396",
  "1000000000000000000000000000000005573406",
  "1000000000000000000000000000000005573416",
  "1000000000000000000000000000000005573426",
  "1000000000000000000000000000000005573436",
  "1000000000000000000000000000000005573446",
  "1000000000000000000000000000000005573456",
  "1000000000000000000000000000000005573466",
  "1000000000000000000000000000000005573476",
  "1000000000000000000000000000000005573486",
  "1000000000000000000000000000000005573496",
  "1000000000000000000000000000000005573506",
  "1000000000000000000000000000000005573516",
  "1000000000000000000000000000000005581616",
  "1000000000000000000000000000000005581626",
  "1000000000000000000000000000000005581636",
  "1000000000000000000000000000000005581646",
  "1000000000000000000000000000000005581656",
  "1000000000000000000000000000000005581666",
  "1000000000000000000000000000000005581676",
  "1000000000000000000000000000000005581686",
  "1000000000000000000000000000000005581696",
  "1000000000000000000000000000000005581706",
  "1000000000000000000000000000000005581716",
  "1000000000000000000000000000000005581726",
  "1000000000000000000000000000000005581736",
  "1000000000000000000000000000000005581746",
  "1000000000000000000000000000000005581756",
  "1000000000000000000000000000000005581766",
  "1000000000000000000000000000000005581776",
  "1000000000000000000000000000000005581786",
  "1000000000000000000000000000000005581796",
  "1000000000000000000000000000000005581806",
  "1000000000000000000000000000000005581816",
  "1000000000000000000000000000000005581826",
  "1000000000000000000000000000000005581836",
  "1000000000000000000000000000000005581846",
  "1000000000000000000000000000000005581856",
  "1000000000000000000000000000000005582760",
  "1000000000000000000000000000000005581866",
  "1000000000000000000000000000000005581876",
  "1000000000000000000000000000000005581886",
  "1000000000000000000000000000000005581896",
  "1000000000000000000000000000000005581906",
  "1000000000000000000000000000000005581310",
  "1000000000000000000000000000000005581320",
  "1000000000000000000000000000000005581330",
  "1000000000000000000000000000000005581340",
  "1000000000000000000000000000000005581350",
  "1000000000000000000000000000000005581360",
  "1000000000000000000000000000000005581370",
  "1000000000000000000000000000000005581380",
  "1000000000000000000000000000000005581390",
  "1000000000000000000000000000000005581400",
  "1000000000000000000000000000000005581410",
  "1000000000000000000000000000000005581420",
  "1000000000000000000000000000000005581430",
  "1000000000000000000000000000000005581440",
  "1000000000000000000000000000000005581450",
  "1000000000000000000000000000000005581460",
  "1000000000000000000000000000000005581470",
  "1000000000000000000000000000000005581480",
  "1000000000000000000000000000000005581490",
  "1000000000000000000000000000000005581500",
  "1000000000000000000000000000000005581510",
  "1000000000000000000000000000000005581520",
  "1000000000000000000000000000000005581530",
  "1000000000000000000000000000000005581540",
  "1000000000000000000000000000000005581550",
  "1000000000000000000000000000000005581560",
  "1000000000000000000000000000000005581570",
  "1000000000000000000000000000000005581580",
  "1000000000000000000000000000000005581590",
  "1000000000000000000000000000000005581600",
];
|
||||
// ASCII framing bytes for the scanner protocol: every message on the
// wire is STX + payload + ETX.
const STX = "\x02";
const ETX = "\x03";

// Raw TCP connection to the scanner head.
const scanner = new net.Socket();
|
||||
|
||||
// Drive the BOL-creation session: open the pick sheet, then replay every
// pallet label at a human-like pace before closing the socket.
scanner.connect(prodPort, prodIP, async () => {
  console.log("Connected to scanner");

  // Local helpers: pause for `ms`, and frame + log + send one payload.
  const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  const send = (payload) => {
    const frame = Buffer.from(`${STX}${scannerID}${payload}${ETX}`, "ascii");
    console.log("Sending:", frame.toString("ascii"));
    scanner.write(frame);
  };

  // Top-of-picksheet command first; give the server time to process it.
  send(scannerCommand);
  await delay(2000);

  // Replay each pallet label in order.
  for (const label of labels) {
    send(label);
    await delay(1200);
  }

  // Close-the-incoming (bottom-of-picksheet) step, intentionally disabled:
  // await delay(1500);
  // send(scannerCommand2);

  await delay(1500);
  scanner.destroy();
});
|
||||
|
||||
// Log every frame the scanner sends back, stripped of transport noise.
// Fix: the handler was declared `async` but never awaited anything, so the
// keyword only produced an ignored Promise — dropped it.
scanner.on("data", (data) => {
  const text = data
    .toString("ascii")
    .replace(/\x00/g, "") // remove NUL padding bytes
    .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // strip ANSI escape sequences
    .trim();
  console.log("Response:", text);
});
|
||||
|
||||
// Report when the TCP session to the scanner ends.
scanner.on("close", function () {
  console.log("Connection closed");
});
|
||||
|
||||
// Surface socket-level failures (connection refused, resets, timeouts).
scanner.on("error", function (err) {
  console.error("Scanner error:", err);
});
|
||||
Reference in New Issue
Block a user