Compare commits

...

17 Commits

Author SHA1 Message Date
f481197d6e feat(new server): added mcd to the server setup 2025-04-01 16:23:13 -05:00
053c05c1a7 fix(perms): fixed the location for the test update 2025-04-01 16:22:52 -05:00
8bdbc4995c refactor(ocp): more work on perfecting dyco and labeling 2025-04-01 16:22:21 -05:00
bc2336e46d refactor(compile): changes to keep the last 20 builds 2025-04-01 16:22:01 -05:00
544e82c01d fix(update server missing migrate): missing 2025-04-01 16:21:15 -05:00
44507d41c4 feat(datamart): initial get active query migrated 2025-04-01 16:20:18 -05:00
ee3026fa7c fix(ocme): cycle count typos 2025-04-01 16:19:25 -05:00
5c642805b1 feat(notifications): migrated all from v1 2025-04-01 16:18:48 -05:00
0d06dae6de chore(release): bump build number to 135 2025-04-01 13:37:46 -05:00
8a639ceaf8 chore(release): bump build number to 134 2025-04-01 10:14:50 -05:00
8c6dc5f690 chore(release): bump build number to 133 2025-04-01 09:50:03 -05:00
16b39fd386 chore(release): bump build number to 132 2025-03-31 20:36:09 -05:00
075bba95ee chore(release): bump build number to 131 2025-03-31 20:33:34 -05:00
6ec9f5827c chore(release): bump build number to 130 2025-03-31 10:23:34 -05:00
93941723cc chore(release): bump build number to 129 2025-03-31 10:18:08 -05:00
fb41fb1dd1 chore(release): bump build number to 128 2025-03-30 18:51:23 -05:00
6d5bcde88e chore(release): bump build number to 127 2025-03-30 18:48:18 -05:00
51 changed files with 13432 additions and 9255 deletions

View File

@@ -0,0 +1,13 @@
-- Migration: creates the "notifications" table backing the notification scheduler.
-- NOTE(review): "notifiySettings" is misspelled ("notifiy"), but it must match the
-- Drizzle schema column name exactly -- renaming requires a coordinated follow-up
-- migration plus a schema change, not an edit here.
CREATE TABLE "notifications" (
"notify_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"name" text NOT NULL,
"description" text NOT NULL,
-- interval count between checks; stored as text, defaults to "1" per timeType unit
"checkInterval" text DEFAULT '1',
-- unit for checkInterval (e.g. 'hour')
"timeType" text DEFAULT 'hour',
-- recipient address list for alert emails; an empty value means no alert is sent
"emails" text,
"active" boolean DEFAULT false,
"lastRan" timestamp DEFAULT now(),
-- per-notification JSON settings (e.g. duration, daysInPast, prodID)
"notifiySettings" jsonb DEFAULT '{}'::jsonb
);
--> statement-breakpoint
-- names are unique so a notification row can be looked up / updated by name
CREATE UNIQUE INDEX "notify_name" ON "notifications" USING btree ("name");

File diff suppressed because it is too large Load Diff

View File

@@ -232,6 +232,13 @@
"when": 1743124980863, "when": 1743124980863,
"tag": "0032_tough_iron_monger", "tag": "0032_tough_iron_monger",
"breakpoints": true "breakpoints": true
},
{
"idx": 33,
"version": "7",
"when": 1743424730855,
"tag": "0033_flimsy_salo",
"breakpoints": true
} }
] ]
} }

View File

@@ -0,0 +1,36 @@
import {
boolean,
jsonb,
pgTable,
text,
timestamp,
uniqueIndex,
uuid,
} from "drizzle-orm/pg-core";
import { createSelectSchema } from "drizzle-zod";
// Drizzle schema for the "notifications" table (see migration 0033).
// NOTE(review): "notifiySettings" is misspelled but must stay in sync with the
// column name created by the SQL migration -- rename both together or not at all.
export const notifications = pgTable(
"notifications",
{
notify_id: uuid("notify_id").defaultRandom().primaryKey(),
name: text("name").notNull(),
description: text("description").notNull(),
// assumed to be the number of `timeType` units between checks -- TODO confirm
checkInterval: text("checkInterval").default("1"),
// unit for checkInterval, e.g. "hour"
timeType: text("timeType").default("hour"),
// recipient list for alert emails; empty means no alert goes out
emails: text("emails"),
active: boolean("active").default(false),
// stamped via NOW() whenever a monitor processes this row
lastRan: timestamp("lastRan").defaultNow(),
// per-notification JSON settings (e.g. duration, daysInPast, prodID)
notifiySettings: jsonb("notifiySettings").default({}),
},
(table) => [
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
// names must be unique so notifications can be updated by name
uniqueIndex("notify_name").on(table.name),
]
);
// Schema for inserting a user - can be used to validate API requests
// export const insertRolesSchema = createInsertSchema(roles, {
// name: z.string().min(3, {message: "Role name must be more than 3 letters"}),
// });
// Schema for selecting a Expenses - can be used to validate API responses
export const selectNotificationsSchema = createSelectSchema(notifications);

View File

@@ -41,13 +41,22 @@ export default function OcmeCycleCount() {
setCounting(true); setCounting(true);
toast.success(`Cycle count started`); toast.success(`Cycle count started`);
try { try {
const res = await axios.post("/ocme/api/v1/cyclecount", data, { const res = await axios.post("/ocme/api/v1/cycleCount", data, {
headers: { Authorization: `Bearer ${token}` }, headers: { Authorization: `Bearer ${token}` },
}); });
toast.success(res.data.message);
setData(res.data.data); if (res.data.success) {
setCounting(false); toast.success(res.data.message);
reset(); setData(res.data.data);
setCounting(false);
reset();
}
if (res.data.success) {
toast.success(res.data.message);
setCounting(false);
}
} catch (error) { } catch (error) {
toast.error("There was an error cycle counting"); toast.error("There was an error cycle counting");
setCounting(false); setCounting(false);
@@ -137,7 +146,7 @@ export default function OcmeCycleCount() {
<TableHead>Result</TableHead> <TableHead>Result</TableHead>
</TableRow> </TableRow>
</TableHeader> </TableHeader>
{data.length === 0 ? ( {data?.length === 0 ? (
<TableBody> <TableBody>
{Array(10) {Array(10)
.fill(0) .fill(0)

18012
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -26,7 +26,8 @@
"prodBuild": "npm run v1Build && npm run build && npm run zipServer && npm run dev", "prodBuild": "npm run v1Build && npm run build && npm run zipServer && npm run dev",
"commit": "cz", "commit": "cz",
"prodinstall": "npm i --omit=dev && npm run db:migrate", "prodinstall": "npm i --omit=dev && npm run db:migrate",
"checkupdates": "npx npm-check-updates" "checkupdates": "npx npm-check-updates",
"testingCode": "dotenvx run -f .env -- tsx watch server/services/notifications/utils/masterNotifications.ts"
}, },
"config": { "config": {
"commitizen": { "commitizen": {
@@ -34,7 +35,7 @@
} }
}, },
"admConfig": { "admConfig": {
"build": 126, "build": 135,
"oldBuild": "backend-0.1.3.zip" "oldBuild": "backend-0.1.3.zip"
}, },
"devDependencies": { "devDependencies": {
@@ -44,6 +45,7 @@
"@types/js-cookie": "^3.0.6", "@types/js-cookie": "^3.0.6",
"@types/mssql": "^9.1.7", "@types/mssql": "^9.1.7",
"@types/node": "^22.13.11", "@types/node": "^22.13.11",
"@types/node-cron": "^3.0.11",
"@types/nodemailer": "^6.4.17", "@types/nodemailer": "^6.4.17",
"@types/pg": "^8.11.11", "@types/pg": "^8.11.11",
"@types/ws": "^8.18.0", "@types/ws": "^8.18.0",
@@ -64,6 +66,7 @@
"adm-zip": "^0.5.16", "adm-zip": "^0.5.16",
"axios": "^1.8.4", "axios": "^1.8.4",
"bcryptjs": "^3.0.2", "bcryptjs": "^3.0.2",
"croner": "^9.0.0",
"date-fns": "^4.1.0", "date-fns": "^4.1.0",
"drizzle-kit": "^0.30.5", "drizzle-kit": "^0.30.5",
"drizzle-orm": "^0.41.0", "drizzle-orm": "^0.41.0",

View File

@@ -0,0 +1,69 @@
/**
 * Calculates the NMFC freight class for a package from its weight and
 * dimensions.
 *
 * @param weight package weight in kilograms
 * @param length package length in millimetres
 * @param width  package width in millimetres
 * @param height package height in millimetres
 * @returns the freight class (50 .. 500) determined by density in lb/ft^3
 */
export const freightClass = (
  weight: number,
  length: number,
  width: number,
  height: number
) => {
  // Unit conversion factors: inputs arrive metric, the class table is imperial.
  const MM_PER_INCH = 25.4;
  const LBS_PER_KG = 2.20462;
  const CUBIC_INCHES_PER_CUBIC_FOOT = 1728;

  const pounds = weight * LBS_PER_KG;
  const cubicFeet =
    ((length / MM_PER_INCH) * (width / MM_PER_INCH) * (height / MM_PER_INCH)) /
    CUBIC_INCHES_PER_CUBIC_FOOT;
  // Density in pounds per cubic foot drives the class lookup.
  const density = pounds / cubicFeet;

  // Minimum density (lb/ft^3) -> freight class, ordered highest density first.
  const classTable: [number, number][] = [
    [50, 50],
    [35, 55],
    [30, 60],
    [22.5, 65],
    [15, 70],
    [13.5, 77.5],
    [12, 85],
    [10.5, 92.5],
    [9, 100],
    [8, 110],
    [7, 125],
    [6, 150],
    [5, 175],
    [4, 200],
    [3, 250],
    [2, 300],
    [1, 400],
  ];

  const match = classTable.find(([minDensity]) => density >= minDensity);
  // Anything under 1 lb/ft^3 (or a non-finite density) falls through to 500,
  // exactly like the final else of the original if/else ladder.
  return match ? match[1] : 500;
};

View File

@@ -25,6 +25,8 @@ import os from "os";
import { tryCatch } from "./globalUtils/tryCatch.js"; import { tryCatch } from "./globalUtils/tryCatch.js";
import { sendEmail } from "./services/notifications/controller/sendMail.js"; import { sendEmail } from "./services/notifications/controller/sendMail.js";
import notify from "./services/notifications/notifyService.js"; import notify from "./services/notifications/notifyService.js";
import eom from "./services/eom/eomService.js";
import dataMart from "./services/dataMart/dataMartService.js";
// create the main prodlogin here // create the main prodlogin here
const username = "lst_user"; const username = "lst_user";
@@ -100,6 +102,8 @@ const routes = [
loggerService, loggerService,
ocpService, ocpService,
notify, notify,
eom,
dataMart,
] as const; ] as const;
const appRoutes = routes.forEach((route) => { const appRoutes = routes.forEach((route) => {

View File

@@ -210,7 +210,9 @@ $plantFunness = {
Set-Location $serverPath Set-Location $serverPath
npm run prodinstall # --omit=dev npm run prodinstall # --omit=dev
Write-Host "Finished doing updates" Write-Host "Finished doing updates"
Start-Sleep -Seconds 1 # Start-Sleep -Seconds 1
# Write-HOst "Running db migrations"
# npm run db:migrate
########################################################### ###########################################################
# Old system still active until we have everything off it # Old system still active until we have everything off it
@@ -347,7 +349,7 @@ try {
$gatewayport = "4400" $gatewayport = "4400"
$systemport = "4200" $systemport = "4200"
$ocmeport = "4300" $ocmeport = "4300"
$appport = "4900" $appport = "4400"
if ($token -eq "usiow2") { if ($token -eq "usiow2") {
$dbLink = "lstBackendDB_2" $dbLink = "lstBackendDB_2"
@@ -406,13 +408,13 @@ try {
########################################################### ###########################################################
# Starting the services back up. # Starting the services back up.
########################################################### ###########################################################
Write-Host "Starting the services" # Write-Host "Starting the services"
Write-Host "Starting $($serviceSystem)" # Write-Host "Starting $($serviceSystem)"
Start-Service -DisplayName $serviceSystem # Start-Service -DisplayName $serviceSystem
Start-Sleep -Seconds 1 # Start-Sleep -Seconds 1
Write-Host "Starting $($serviceGateway)" # Write-Host "Starting $($serviceGateway)"
Start-Service -DisplayName $serviceGateway # Start-Service -DisplayName $serviceGateway
Start-Sleep -Seconds 1 # Start-Sleep -Seconds 1
#Write-Host "Starting $($serviceAuth)" #Write-Host "Starting $($serviceAuth)"
#Start-Service -DisplayName $serviceAuth #Start-Service -DisplayName $serviceAuth
#Start-Sleep -Seconds 1 #Start-Sleep -Seconds 1

View File

@@ -7,187 +7,196 @@ import { getAppInfo } from "../globalUtils/appInfo.js";
// create the ignore list // create the ignore list
const ignoreList = [ const ignoreList = [
".git", ".git",
"builds", "builds",
"server", "server",
"node_modules", "node_modules",
"apiDocsLSTV2", "apiDocsLSTV2",
"testFiles", "testFiles",
".env", ".env",
".gitignore", ".gitignore",
".versionrc.json", ".versionrc.json",
"drizzle-dev.config.ts", "drizzle-dev.config.ts",
"nssm.exe", "nssm.exe",
"postgresql-17.2-3-windows-x64.exe", "postgresql-17.2-3-windows-x64.exe",
// front end ignore // front end ignore
"frontend/node_modules", "frontend/node_modules",
"fonrtend/.env", "fonrtend/.env",
"frontend/public", "frontend/public",
"frontend/src", "frontend/src",
"frontend/.gitignore", "frontend/.gitignore",
"frontend/eslint.config.js", "frontend/eslint.config.js",
"frontend/index.html", "frontend/index.html",
"frontend/package.json", "frontend/package.json",
"frontend/package-lock.json", "frontend/package-lock.json",
"frontend/README.md", "frontend/README.md",
"frontend/tsconfig.json", "frontend/tsconfig.json",
"frontend/tsconfig.app.json", "frontend/tsconfig.app.json",
"frontend/tsconfig.node.json", "frontend/tsconfig.node.json",
"frontend/vite.config.ts", "frontend/vite.config.ts",
"frontend/components.json", "frontend/components.json",
]; ];
const shouldIgnore = (itemPath: any) => { const shouldIgnore = (itemPath: any) => {
const normalizedItemPath = itemPath.replace(/\\/g, "/"); const normalizedItemPath = itemPath.replace(/\\/g, "/");
return ignoreList.some((ignorePattern) => { return ignoreList.some((ignorePattern) => {
const normalizedIgnorePatther = ignorePattern.replace(/\\/g, "/"); const normalizedIgnorePatther = ignorePattern.replace(/\\/g, "/");
return ( return (
normalizedItemPath === normalizedIgnorePatther || normalizedItemPath === normalizedIgnorePatther ||
normalizedItemPath.startsWith(`${normalizedIgnorePatther}/`) normalizedItemPath.startsWith(`${normalizedIgnorePatther}/`)
); );
}); });
}; };
const addToZip = (zip: any, currentPath: string, rootPath: string) => { const addToZip = (zip: any, currentPath: string, rootPath: string) => {
const items = fs.readdirSync(currentPath); const items = fs.readdirSync(currentPath);
items.forEach((item) => { items.forEach((item) => {
const itemPath = path.join(currentPath, item); const itemPath = path.join(currentPath, item);
const relativePath = path.relative(rootPath, itemPath); const relativePath = path.relative(rootPath, itemPath);
// Skip if the item is in the ignore list // Skip if the item is in the ignore list
if (shouldIgnore(relativePath)) { if (shouldIgnore(relativePath)) {
createLog("info", "lst", "zipUpBuild", `Ignoring: ${relativePath}`); createLog("info", "lst", "zipUpBuild", `Ignoring: ${relativePath}`);
return; return;
} }
const stat = fs.statSync(itemPath); const stat = fs.statSync(itemPath);
if (stat.isDirectory()) { if (stat.isDirectory()) {
// If it's a directory, recursively add its contents // If it's a directory, recursively add its contents
addToZip(zip, itemPath, rootPath); addToZip(zip, itemPath, rootPath);
} else { } else {
// If it's a file, add it to the zip with the preserved folder structure // If it's a file, add it to the zip with the preserved folder structure
zip.addLocalFile(itemPath, path.dirname(relativePath)); zip.addLocalFile(itemPath, path.dirname(relativePath));
} }
}); });
}; };
const updateBuildNumber = (appLock: string) => { const updateBuildNumber = (appLock: string) => {
const packagePath = path.join(appLock, "package.json"); // Adjust path if necessary const packagePath = path.join(appLock, "package.json"); // Adjust path if necessary
try { try {
// Read package.json // Read package.json
const pkgData = fs.readFileSync(packagePath, "utf8"); const pkgData = fs.readFileSync(packagePath, "utf8");
const pkgJson = JSON.parse(pkgData); const pkgJson = JSON.parse(pkgData);
// Ensure admConfig exists // Ensure admConfig exists
if (pkgJson.admConfig && typeof pkgJson.admConfig.build === "number") { if (pkgJson.admConfig && typeof pkgJson.admConfig.build === "number") {
// Increment the build number // Increment the build number
pkgJson.admConfig.build += 1; pkgJson.admConfig.build += 1;
// Write the updated data back // Write the updated data back
fs.writeFileSync(packagePath, JSON.stringify(pkgJson, null, 2), "utf8"); fs.writeFileSync(
packagePath,
JSON.stringify(pkgJson, null, 2),
"utf8"
);
createLog( createLog(
"info", "info",
"lst", "lst",
"zipUpBuild", "zipUpBuild",
`Build number updated to: ${pkgJson.admConfig.build}` `Build number updated to: ${pkgJson.admConfig.build}`
); );
// Auto-commit changes // Auto-commit changes
execSync("git add package.json"); execSync("git add package.json");
execSync( execSync(
`git commit -m "chore(release): bump build number to ${pkgJson.admConfig.build}"` `git commit -m "chore(release): bump build number to ${pkgJson.admConfig.build}"`
); );
} else { } else {
createLog( createLog(
"error", "error",
"lst", "lst",
"zipUpBuild", "zipUpBuild",
"admConfig.build is missing or not a number" "admConfig.build is missing or not a number"
); );
} }
} catch (error) { } catch (error) {
createLog( createLog(
"error",
"lst",
"zipUpBuild",
`Error updating build number: ${error}`
);
}
};
export const createZip = async (appLock: string) => {
const app = await getAppInfo(appLock);
const zip = new AdmZip();
//dest path for this app... hard coded for meow will be in db later
const destPath = `${process.env.DEVFOLDER}\\builds`;
const srcPath = `${process.env.DEVFOLDER}`;
addToZip(zip, srcPath, srcPath);
// Write the zip file to disk
const outputZipPath = path.join(
destPath,
`${app.name}-${app.version}-${app.admConfig.build}.zip`
);
zip.writeZip(outputZipPath);
createLog(
"info",
"lst",
"zipUpBuild",
`Zip file created at ${outputZipPath}`
);
updateBuildNumber(appLock);
// only keep the last 5 builds for the type we have.
try {
const appFiles = fs
.readdirSync(destPath)
.filter((file) => file.startsWith(app.name)) // Ensure only backend files are matched
.map((file) => ({
name: file,
time: fs.statSync(path.join(destPath, file)).mtime.getTime(),
}))
.sort((a, b) => a.time - b.time); // Sort by modification time (oldest first)
createLog(
"info",
"lst",
"zipUpBuild",
`app Files (sorted by time):", ${JSON.stringify(appFiles)}`
);
if (appFiles.length > 5) {
appFiles.slice(0, -5).forEach((file) => {
const filePath = path.join(destPath, file.name);
try {
fs.unlinkSync(filePath);
createLog("info", "lst", "zipUpBuild", `Deleted: ${file.name}`);
} catch (error: any) {
createLog(
"error", "error",
"lst", "lst",
"zipUpBuild", "zipUpBuild",
`Failed to delete ${file.name}: ${error.message}` `Error updating build number: ${error}`
); );
}
});
} else {
createLog("info", "lst", "zipUpBuild", "No files to delete.");
} }
} catch (error: any) { };
createLog(
"error", export const createZip = async (appLock: string) => {
"lst", const app = await getAppInfo(appLock);
"zipUpBuild", const zip = new AdmZip();
`Error reading directory or deleting files:", ${error.message}`
//dest path for this app... hard coded for meow will be in db later
const destPath = `${process.env.DEVFOLDER}\\builds`;
const srcPath = `${process.env.DEVFOLDER}`;
addToZip(zip, srcPath, srcPath);
// Write the zip file to disk
const outputZipPath = path.join(
destPath,
`${app.name}-${app.version}-${app.admConfig.build}.zip`
); );
} zip.writeZip(outputZipPath);
createLog(
"info",
"lst",
"zipUpBuild",
`Zip file created at ${outputZipPath}`
);
updateBuildNumber(appLock);
// only keep the last 5 builds for the type we have.
try {
const appFiles = fs
.readdirSync(destPath)
.filter((file) => file.startsWith(app.name)) // Ensure only backend files are matched
.map((file) => ({
name: file,
time: fs.statSync(path.join(destPath, file)).mtime.getTime(),
}))
.sort((a, b) => a.time - b.time); // Sort by modification time (oldest first)
createLog(
"info",
"lst",
"zipUpBuild",
`app Files (sorted by time):", ${JSON.stringify(appFiles)}`
);
if (appFiles.length > 20) {
appFiles.slice(0, -20).forEach((file) => {
const filePath = path.join(destPath, file.name);
try {
fs.unlinkSync(filePath);
createLog(
"info",
"lst",
"zipUpBuild",
`Deleted: ${file.name}`
);
} catch (error: any) {
createLog(
"error",
"lst",
"zipUpBuild",
`Failed to delete ${file.name}: ${error.message}`
);
}
});
} else {
createLog("info", "lst", "zipUpBuild", "No files to delete.");
}
} catch (error: any) {
createLog(
"error",
"lst",
"zipUpBuild",
`Error reading directory or deleting files:", ${error.message}`
);
}
}; };
//createZip("C:\\Users\\matthes01\\Documents\\lstv2"); //createZip("C:\\Users\\matthes01\\Documents\\lstv2");
@@ -195,16 +204,16 @@ export const createZip = async (appLock: string) => {
// Only call `createZip` if the script is executed directly // Only call `createZip` if the script is executed directly
if (process.argv.length > 2) { if (process.argv.length > 2) {
const location = process.argv[2]; const location = process.argv[2];
if (!location) { if (!location) {
createLog("error", "lst", "zipUpBuild", "Error: No location provided."); createLog("error", "lst", "zipUpBuild", "Error: No location provided.");
process.exit(1); process.exit(1);
} else { } else {
createLog("info", "lst", "zipUpBuild", "Startiing the zip process."); createLog("info", "lst", "zipUpBuild", "Startiing the zip process.");
} }
createZip(location); createZip(location);
} else { } else {
createLog("error", "lst", "zipUpBuild", "Error: No location provided."); createLog("error", "lst", "zipUpBuild", "Error: No location provided.");
} }

View File

@@ -0,0 +1,13 @@
import { query } from "../../sqlServer/prodSqlServer.js";
import { activeArticle } from "../../sqlServer/querys/dataMart/article.js";
export const getActiveAv = async () => {
let articles: any = [];
try {
articles = await query(activeArticle, "Get active articles");
} catch (error) {
articles = error;
}
return articles;
};

View File

@@ -0,0 +1,13 @@
import { OpenAPIHono } from "@hono/zod-openapi";
import getArticles from "./route/getActiveArticles.js";

// Root router for the data-mart service: every sub-route is mounted under
// the "/datamart" prefix.
const app = new OpenAPIHono();

const routes = [getArticles] as const;
// `forEach` returns undefined, so the old `const appRoutes = ...` binding was
// an unused local; a plain loop makes the intent clear.
for (const route of routes) {
  app.route("/datamart", route);
}

export default app;

View File

@@ -0,0 +1,47 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { getActiveAv } from "../controller/getActiveArticles.js";

const app = new OpenAPIHono({ strict: false });

// GET /datamart/getarticles -- returns every active article from the data mart.
// (The unused copy-pasted EomStat schema from the eom routes was removed.)
app.openapi(
  createRoute({
    tags: ["dataMart"],
    summary: "Returns all the Active articles.",
    method: "get",
    path: "/getarticles",
    responses: responses(),
  }),
  async (c) => {
    // NOTE(review): auth / apiHit tracking is disabled here, unlike the eom
    // stats route -- confirm whether this endpoint should record hits.
    //apiHit(c, { endpoint: `api/logger/logs/id` });
    try {
      return c.json(
        {
          success: true,
          message: "Current active Articles",
          data: await getActiveAv(),
        },
        200
      );
    } catch (error) {
      // Fixed copy-paste: this route fetches articles, it does not post eom stats.
      return c.json(
        {
          success: false,
          message: "There was an error getting the active articles.",
          data: error,
        },
        400
      );
    }
  }
);

export default app;

View File

@@ -0,0 +1,139 @@
// import cron from "node-cron";
// import {runQuery, prisma, totalInvNoRn, activeArticle, getShiftTime, historicalInv} from "database";
// import {createLog} from "logging";
// import {deleteHistory} from "./deleteHistory.js";
// export const historyInv = async (date) => {
// //console.log(date);
// if (!date) {
// return `Missing Data`;
// }
// // date should be sent over as a string IE: 2024-01-01
// let inv = [];
// try {
// inv = await prisma.historyInventory.findMany({where: {histDate: date}});
// console.log(inv.length);
// // if the date returns nothing we need to pull the historical data
// if (inv.length === 0) {
// const result = await prisma.settings.findFirst({where: {name: "plantToken"}});
// try {
// const plantUpdate = historicalInv.replaceAll("test1", result.value);
// const queryDate = plantUpdate.replaceAll("[date]", date);
// inv = await runQuery(queryDate, "Get histical inv");
// return inv;
// } catch (error) {
// createLog("general/eom", "error", "There was an error getting the historical inv.");
// return error;
// }
// } else {
// return inv;
// }
// //return inv;
// } catch (error) {
// console.log(error);
// return error;
// }
// };
// // start the cron job for getting the hostrical inv based on the plants shift time
// export const startCronHist = () => {
// let shiftTime = ["06", "00", "00"];
// const startProcess = async () => {
// let inv = [];
// let articles = [];
// let plantToken = "test1";
// const date = new Date();
// const dateString = date.toISOString().split("T")[0];
// date.setDate(date.getDate() - 30);
// const oldDate = date.toISOString().split("T")[0];
// // checking if even need to run this
// // before adding more make sure we dont already have data
// const checkInv = await prisma.historyInventory.findFirst({where: {histDate: dateString}});
// if (checkInv) {
// createLog(
// "general/eom",
// "warn",
// `There seems to already be inventory added for ${dateString}, no new data will be added`
// );
// return;
// }
// // get plant token
// try {
// const result = await prisma.settings.findFirst({where: {name: "plantToken"}});
// plantToken = result.value;
// } catch (error) {
// createLog("general/eom", "error", "failed to get planttoken");
// }
// //get shift time
// try {
// const result = await runQuery(getShiftTime.replaceAll("test1", plantToken), "GettingShift time");
// shiftTime = result[0].shiftStartTime.split(":");
// } catch (error) {
// createLog("general/eom", "error", `Error running getShift Query: ${error}`);
// }
// // get inventory
// try {
// const result = await runQuery(totalInvNoRn.replaceAll("test1", plantToken), "getting inventory");
// inv = result;
// } catch (error) {
// createLog("general/eom", "error", `Error running get inventory Query: ${error}`);
// }
// // get active articles
// try {
// const result = await runQuery(activeArticle.replaceAll("test1", plantToken), "Get active articles");
// articles = result;
// } catch (error) {
// createLog("general/eom", "error", `Error running get article: ${error}`);
// }
// //add the inventory to the historical table
// try {
// let hist = Object.entries(inv).map(([key, value]) => {
// // remove the values we dont want in the historical view
// const {total_Pallets, avalible_Pallets, coa_Pallets, held_Pallets, ...histData} = value;
// // get av tyep
// const avType = articles.filter((a) => (a.IdArtikelvarianten = inv[key].av))[0].TypeOfMaterial;
// // add in the new fields
// const hist = {
// ...histData,
// histDate: dateString, //new Date(Date.now()).toISOString().split("T")[0],
// avType,
// };
// return hist;
// });
// try {
// const addHistData = await prisma.historyInventory.createMany({data: hist});
// createLog(
// "general/eom",
// "info",
// `${addHistData.count} were just added to the historical inventory for date ${dateString}`
// );
// } catch (error) {
// createLog("general/eom", "error", `Adding new historical inventory error: ${error}`);
// }
// // delete the older inventory
// deleteHistory(oldDate);
// } catch (error) {
// createLog("general/eom", "error", `Adding new historical inventory error: ${error}`);
// }
// };
// // actaully run the process once after restaart just to make sure we have inventory
// startProcess();
// // setup the cron stuff
// const startHour = shiftTime[0];
// const startMin = shiftTime[1];
// createLog("general/eom", "info", `Historical Data will run at ${shiftTime[0]}:${shiftTime[1]} daily`);
// cron.schedule(`${startMin} ${startHour} * * *`, () => {
// createLog("general/eom", "info", "Running historical invnetory.");
// startProcess();
// });
// };

View File

@@ -0,0 +1,15 @@
// import {prisma} from "database";
// import {createLog} from "logging";
// export const deleteHistory = async (date: string) => {
// // delete the inventory if it equals this date
// try {
// const remove = await prisma.$executeRaw`
// DELETE FROM historyInventory
// WHERE histDate < ${date}
// `;
// createLog("general/eom", "info", `${remove} were just remove from the historical inventory for date: ${date}`);
// } catch (error) {
// createLog("general/eom", "error", `Removing historical inventory error: ${error}`);
// }
// };

View File

@@ -1,5 +1,13 @@
import {OpenAPIHono} from "@hono/zod-openapi"; import { OpenAPIHono } from "@hono/zod-openapi";
const app = new OpenAPIHono(); const app = new OpenAPIHono();
import stats from "./route/stats.js";
import history from "./route/invHistory.js";
const routes = [stats, history] as const;
const appRoutes = routes.forEach((route) => {
app.route("/eom", route);
});
export default app; export default app;

View File

@@ -0,0 +1,41 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";

const app = new OpenAPIHono({ strict: false });

// Request params -- NOTE(review): copied verbatim from the stats route;
// confirm these are the params a history lookup actually needs (a date
// parameter seems more likely for historical inventory).
const EomStat = z.object({
  plant: z.string().openapi({ example: "Salt Lake City" }),
  userRan: z.string().openapi({ example: "smith034" }),
  eomSheetVersion: z.string().openapi({ example: "0.0.223" }),
});

// POST /eom/histinv -- returns historical inventory for the eom sheet.
app.openapi(
  createRoute({
    tags: ["eom"],
    summary: "Gets the correct eom history.",
    method: "post",
    path: "/histinv",
    request: {
      params: EomStat,
    },
    responses: responses(),
  }),
  async (c) => {
    //const body = await c.req.json();
    // make sure we have a valid user being accessed thats really logged in
    //apiHit(c, { endpoint: `api/logger/logs/id` });
    try {
      // TODO: still a stub -- wire up the historical inventory lookup.
      return c.json({ success: true, message: "", data: [] }, 200);
    } catch (error) {
      // Fixed copy-paste: this endpoint fetches history, it does not post stats.
      return c.json(
        {
          success: false,
          message: "There was an error getting the eom inventory history.",
          data: error,
        },
        400
      );
    }
  }
);

export default app;

View File

@@ -0,0 +1,41 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";

const app = new OpenAPIHono({ strict: false });

// Shape of the params sent with each eom run.
const EomStat = z.object({
  plant: z.string().openapi({ example: "Salt Lake City" }),
  userRan: z.string().openapi({ example: "smith034" }),
  eomSheetVersion: z.string().openapi({ example: "0.0.223" }),
});

// Route definition for POST /eom/stats.
const statsRoute = createRoute({
  tags: ["eom"],
  summary: "Adds in the stats for the eom.",
  method: "post",
  path: "/stats",
  request: {
    params: EomStat,
  },
  responses: responses(),
});

// Records which plant / user / sheet version ran the eom.
app.openapi(statsRoute, async (c) => {
  // Track the hit against this endpoint.
  apiHit(c, { endpoint: `api/logger/logs/id` });
  try {
    return c.json({ success: true, message: "", data: [] }, 200);
  } catch (error) {
    return c.json(
      {
        success: false,
        message: "There was an error posting the eom stat.",
        data: error,
      },
      400
    );
  }
});

export default app;

View File

@@ -0,0 +1,143 @@
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { sendEmail } from "../sendMail.js";
import { query } from "../../../sqlServer/prodSqlServer.js";

export interface DownTime {
  downTimeId?: number;
  machineAlias?: string;
}

/**
 * Downtime monitor: queries SQL Server for downtime events longer than the
 * configured duration, emails the notification's recipients, then records the
 * newest downtime id in notifiySettings.prodID so the same event is not
 * reported on the next interval.
 *
 * NOTE(review): the function/file is named after reprint-label monitoring but
 * the query is a downtime check -- confirm the intended naming.
 *
 * @param notifyData a full row from the notifications table.
 */
export default async function reprintLabelMonitor(notifyData: any) {
  // Future plan: a "subalerts" link table (subId, userId, notificationId)
  // would let users subscribe themselves, leaving the email list blank here.
  if (notifyData.emails === "") {
    createLog(
      "error",
      "notify",
      "notify",
      `There are no emails set for ${notifyData.name}`
    );
    return;
  }

  // notifiySettings can be null/missing on a fresh row. The old ternaries only
  // guarded the whole object, so a missing single key interpolated the string
  // "undefined" into the SQL, and the later bare `notifiySettings.prodID`
  // read crashed outright on a null settings object.
  const settings = notifyData.notifiySettings ?? {};
  const duration = settings.duration ?? 10;
  const daysInPast = settings.daysInPast ?? 10;
  // Last downtime id already alerted on; 0 means "alert on anything new".
  const lastSeenId = settings.prodID ?? 0;

  const dQuery = `
SELECT
[IdHistoryStillstandsereignis] as downTimeId
,DATEDIFF(MINUTE,b.[Startzeit], b.[Endzeit]) as totalDuration
--, b.[IdMaschine]
,x.[Bezeichnung] as machineAlias
--,b.[IdStillstandsGrund],
, c.CTO_Code
,c.Downtime_Description
--,b.[IdFehlermerkmal],
,case when g.DT_Group_Desc is null then 'Not assigned yet' else g.DT_Group_Desc end as groupDesc
,b.[Bemerkung] as remark
,CONVERT(VARCHAR, CAST(b.[Startzeit] AS DATETIME), 100) dtStart
,CONVERT(VARCHAR, CAST(b.[Endzeit] AS DATETIME), 100) dtEnd
FROM Alplaprod_test1.[dbo].[T_HistoryStillstandsereignis] (nolock)b
--get the machine info
left join
Alplaprod_test1.[dbo].[T_Maschine] (nolock)x
on b.IdMaschine = x.IdMaschine
-- add in the cto codes
left join
Alplaprod_test1.[dbo].[V_MES_Downtime_Reasons] (nolock)c
on b.IdStillstandsGrund = c.Local_Downtime_ID
left join
Alplaprod_test1.[dbo].[V_MES_Downtime_Characteristics] (nolock)g
on b.IdFehlermerkmal = g.Local_DT_Characteristic_Id
where DATEDIFF(MINUTE,b.[Startzeit],b.[Endzeit]) > ${duration}
and b.[Startzeit] > getDate() - ${daysInPast} --adding this date check in so we dont get everything possible
and c.CTO_Code not like 'a%'
and c.CTO_Code not like 'b%'
and c.CTO_Code not like 'c%'
and c.CTO_Code not like 'd%'
and c.CTO_Code not like 'e%'
and c.CTO_Code not like 'f%'
and c.CTO_Code not like 'y%'
order by IdHistoryStillstandsereignis desc
`;

  try {
    const downTime: any = await query(dQuery, "downTimeCheck");
    // Only alert when there is a downtime event newer than the last one sent.
    if (downTime.length > 0 && downTime[0]?.downTimeId > lastSeenId) {
      const emailSetup = {
        emailTo: notifyData.emails,
        subject: `Alert! Downtime recorded greater than ${duration}min ${
          downTime.length === 1 ? `on ${downTime[0].machineAlias}` : ""
        }`,
        template: "downTimeCheck",
        context: {
          items: downTime,
          secondarySetting: settings,
        },
      };
      const sentEmail = await sendEmail(emailSetup);
      if (!sentEmail.success) {
        createLog(
          "error",
          "notify",
          "notify",
          "Failed to send email, will try again on next interval"
        );
        return;
      }
      // Persist the newest downtime id so the next run skips this event.
      const { error } = await tryCatch(
        db
          .update(notifications)
          .set({
            lastRan: sql`NOW()`,
            notifiySettings: {
              ...settings,
              prodID: downTime[0].downTimeId,
            },
          })
          .where(eq(notifications.name, notifyData.name))
      );
      if (error) {
        // Previously swallowed: a failed update means this downtime will be
        // re-alerted on the next interval, so at least make it visible.
        createLog(
          "error",
          "notify",
          "notify",
          `Failed to record prodID for ${notifyData.name}: ${error}`
        );
      }
    }
  } catch (err) {
    createLog(
      "error",
      "notify",
      "notify",
      `Error from running the downtimeCheck query: ${err}`
    );
  }
}

View File

@@ -0,0 +1,27 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
/**
 * Placeholder monitor: records that this notification was visited by
 * stamping `lastRan` on its row. Runs no query and sends no email.
 *
 * @param notifyData - the full notification row (name is used as the key).
 */
const notification = async (notifyData: any) => {
  createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
  const { error } = await tryCatch(
    db
      .update(notifications)
      .set({
        lastRan: sql`NOW()`,
      })
      .where(eq(notifications.name, notifyData.name))
  );
  if (error) {
    // Previously the tryCatch result was discarded; surface DB failures
    // so a broken update is visible in the logs.
    createLog(
      "error",
      "notify",
      "notify",
      `Failed to update lastRan for ${notifyData.name}: ${error}`
    );
  }
};
export default notification;

View File

@@ -0,0 +1,133 @@
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
import { isWeekend } from "date-fns";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sendEmail } from "../sendMail.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { eq, sql } from "drizzle-orm";
export interface PPOO {
  // Warehouse position id (the `IdPosition` column of the query result);
  // optional because rows are typed loosely — only this field is read here.
  IdPosition?: number;
}
/**
 * Production-aging monitor (name kept for backward compatibility with the
 * label-reprint file it was copied from): emails when pallets have sat in
 * the configured production locations longer than `notifyData.checkTime`
 * hours. Optionally stays quiet on weekends. On success, stamps lastRan
 * and stores the reported count / newest IdPosition in notifiySettings.
 *
 * @param notifyData - full notification row (emails, checkTime, timeType,
 *                     notifiySettings.{locations,weekend}).
 */
export default async function reprintLabelMonitor(notifyData: any) {
  createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
  // Without recipients there is nothing useful to do.
  if (notifyData.emails === "") {
    createLog(
      "error",
      "notify",
      "notify",
      `There are no emails set for ${notifyData.name}`
    );
    return;
  }
  // This one goes to managers, so optionally do not send on weekends.
  const weekend = isWeekend(new Date(Date.now()));
  if (weekend && notifyData.notifiySettings.weekend) {
    createLog(
      "info",
      "notify",
      "notify",
      `${notifyData.name} will not run on the weekends`
    );
    return;
  }
  let notifyQuery = `
  SELECT
  --[EinlagerungsDatummin] as lastMovingDate,
  round(VerfuegbareMengeVPKSum,2) as pallets
  ,VerfuegbareMengeSum as total
  ,round([GesperrteMengeVpkSum],2) as held
  ,round([GesperrteMengeSum],2) as heldQty
  ,[IdArtikelVarianten] as av
  ,[IdProdBereich] as pfcID
  ,[ArtikelVariantenBez] as articleDescription
  ,[ArtikelVariantenAlias] as articleDescriptionAlias
  ,[LagerAbteilungKurzBez] as location
  ,[Lfdnr] as runningNumber
  ,[Produktionslos] as lot
  ,[ProduktionsDatumMin] as productionDate
  ,IdPosition
  FROM [AlplaPROD_test1].[dbo].[V_LagerPositionenBarcodes] (nolock)
  where idlagerabteilung in ([locations]) and [ProduktionsDatumMin] < DATEadd( Hour, -[timeCheck], getdate())
  order by [ProduktionsDatumMin] asc
  `;
  // Inject the hour threshold and location list into the query template.
  notifyQuery = notifyQuery.replaceAll("[timeCheck]", notifyData.checkTime);
  notifyQuery = notifyQuery.replaceAll(
    "[locations]",
    notifyData.notifiySettings.locations
  );
  let prod: PPOO[];
  try {
    // FIX: query tag was "Label Reprints" — a copy-paste leftover; this is
    // the production-aging check.
    prod = await query(notifyQuery, "Production check");
    if (prod.length === 0) {
      return;
    }
    const emailSetup = {
      emailTo: notifyData.emails,
      subject: `Alert! Pallets in production greater than ${notifyData.checkTime} ${notifyData.timeType}`,
      template: "productionCheck",
      context: {
        items: prod,
        count: prod.length,
        checkTime: notifyData.checkTime,
        timeCheck: notifyData.timeType,
      },
    };
    const sentEmail = await sendEmail(emailSetup);
    if (!sentEmail.success) {
      createLog(
        "error",
        "notify",
        "notify",
        "Failed to send email, will try again on next interval"
      );
      return;
    }
    // Stamp lastRan and remember what was reported (oldest row's position id).
    const updateSettings = notifyData.notifiySettings;
    const { error } = await tryCatch(
      db
        .update(notifications)
        .set({
          lastRan: sql`NOW()`,
          notifiySettings: {
            ...updateSettings,
            count: prod.length,
            prodID: prod[0].IdPosition,
          },
        })
        .where(eq(notifications.name, notifyData.name))
    );
    if (error) {
      // FIX: the update error was silently discarded before.
      createLog(
        "error",
        "notify",
        "notify",
        `Failed to update ${notifyData.name} settings: ${error}`
      );
    }
  } catch (err) {
    // FIX: was createLog("error", "sql", "error", ...) with a "Label
    // Reprints" message — now consistent with the other monitors.
    createLog(
      "error",
      "notify",
      "notify",
      `Error from running the production check query: ${err}`
    );
  }
}

View File

@@ -0,0 +1,158 @@
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sendEmail } from "../sendMail.js";
export interface Blocking {
  // Human-readable blocking-order number; compared against the stored
  // prodID to detect orders not yet emailed.
  HumanReadableId?: number;
  // Pre-built email subject line produced directly by the SQL query.
  subject?: string;
}
/**
 * Quality-blocking monitor: watches reporting_blocking.BlockingOrder for
 * blocking orders created in the last day that have not been emailed yet,
 * mails them, and records the order ids in notifiySettings so each order
 * is only sent once.
 *
 * @param notifyData - full notification row; uses emails,
 *                     sentBlocking[0].sentBlockingOrders and
 *                     notifiySettings.prodID.
 */
export default async function qualityBlockingMonitor(notifyData: any) {
  createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
  // Nothing to do without recipients.
  if (notifyData.emails === "") {
    createLog(
      "error",
      "notify",
      "notify",
      `There are no emails set for ${notifyData.name}`
    );
    return;
  }
  let blockQuery = `
  SELECT
  'Alert! new blocking order: #' + cast(HumanReadableId as varchar) + ' - ' + ArticleVariantDescription as subject,
  cast([HumanReadableId] as varchar) as blockingNumber,
  [ArticleVariantDescription] as article,
  cast([CustomerHumanReadableId] as varchar) + ' - ' + [CustomerDescription] as customer,
  convert(varchar(10), [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate], 101) + ' - ' + convert(varchar(5), [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate], 108) as blockingDate,
  cast(ArticleVariantHumanReadableId as varchar) + ' - ' + ArticleVariantDescription as av,
  case when [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark = '' or [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark is NULL then 'Please reach out to quality for the reason this was placed on hold as a remark was not entered during the blocking processs' else [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark end as remark,
  cast(FORMAT(TotalAmountOfPieces, '###,###') as varchar) + ' / ' + cast(LoadingUnit as varchar) as peicesAndLoadingUnits,
  [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].ProductionLotHumanReadableId as lotNumber,
  cast(IdGlobalBlockingDefectsGroup as varchar) + ' - ' + BD.Description as mainDefectGroup,
  cast(IdGlobalBlockingDefect as varchar) + ' - ' + MD.Description as mainDefect,
  sent=0,
  lot.MachineLocation as line,
  HumanReadableId
  FROM [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder] (nolock)
  /*** Join 1.0 table to get correct id info to link ***/
  join
  [AlplaPROD_test1].[dbo].[T_BlockingOrders] (nolock) AS BO
  on [HumanReadableId] = BO.[IdBlockingOrder]
  /*** Get the main defect info ***/
  Inner join
  [AlplaPROD_test1].[dbo].[T_BlockingDefectsGroups] (nolock) as BD
  ON BO.IdMainDefectGroup = BD.IdBlockingDefectsGroup
  INNER join
  [AlplaPROD_test1].[dbo].[T_BlockingDefects] as MD
  ON BO.IdMainDefect = MD.IdBlockingDefect
  /*** get lot info ***/
  left join
  (SELECT [MachineLocation]
  ,[MachineDescription]
  ,[ProductionLotHumanReadableId]
  FROM [test1_AlplaPROD2.0_Reporting].[reporting_productionControlling].[ProducedLot]) as lot
  on [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].ProductionLotHumanReadableId = lot.ProductionLotHumanReadableId
  where [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate] between getdate() - 1 and getdate() + 1
  and [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].BlockingTrigger = 1
  and HumanReadableId NOT IN ([sentBlockingOrders])
  `;
  // Exclude blocking orders that were already emailed.
  blockQuery = blockQuery.replaceAll(
    "[sentBlockingOrders]",
    notifyData.sentBlocking[0].sentBlockingOrders
  );
  let blocking: any;
  try {
    blocking = await query(blockQuery, "Quality Blocking");
    if (
      blocking.length > 0 &&
      blocking[0].HumanReadableId > notifyData.notifiySettings.prodID
    ) {
      const emailSetup = {
        emailTo: notifyData.emails,
        // FIX: condition was `blocking.length > 0`, which is always true
        // inside this branch — a single new order now gets its own
        // pre-built subject instead of the generic one.
        subject:
          blocking.length > 1
            ? `Alert! New blocking orders.`
            : blocking[0].subject,
        template: "qualityBlocking",
        context: {
          items: blocking,
        },
      };
      const sentEmail = await sendEmail(emailSetup);
      if (!sentEmail.success) {
        // FIX: category was misspelled "nofity".
        createLog(
          "error",
          "notify",
          "notify",
          "Failed to send email, will try again on next interval"
        );
        return;
      }
      // Merge the newly-mailed order ids into the already-sent list,
      // de-duplicating with a Set.
      const newBlockingOrders = blocking.map((b: any) => b.HumanReadableId);
      const uniqueOrders = Array.from(
        new Set([
          ...notifyData.sentBlocking[0].sentBlockingOrders,
          ...newBlockingOrders,
        ])
      );
      notifyData.sentBlocking[0].sentBlockingOrders = uniqueOrders;
      const { error } = await tryCatch(
        db
          .update(notifications)
          .set({
            lastRan: sql`NOW()`,
            notifiySettings: {
              ...notifyData.notifiySettings,
              prodID: blocking[0].HumanReadableId,
              sentBlockingOrders: uniqueOrders,
            },
          })
          .where(eq(notifications.name, notifyData.name))
      );
      if (error) {
        // FIX: the update error was silently discarded before.
        createLog(
          "error",
          "notify",
          "notify",
          `Failed to update ${notifyData.name} settings: ${error}`
        );
      }
    }
  } catch (err) {
    createLog(
      "error",
      "notify",
      "notify",
      `Error from running the blocking query: ${err}`
    );
  }
}

View File

@@ -0,0 +1,118 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sendEmail } from "../sendMail.js";
export interface Labels {
  // Label-history id read when stamping prodID after a send.
  // NOTE(review): the reprint query in this file does not appear to select
  // this column — confirm it is present on the returned rows.
  IdEtikettenHistorie?: number;
}
/**
 * Label-reprint monitor: emails when V_EtikettenGedruckt shows labels
 * printed more than twice inside the configured look-back window, then
 * stamps lastRan / prodID on the notification row.
 *
 * @param notifyData - full notification row (emails, checkTime, timeType,
 *                     notifiySettings).
 */
const notification = async (notifyData: any) => {
  createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
  // Without recipients there is nothing useful to do.
  if (notifyData.emails === "") {
    createLog(
      "error",
      "notify",
      "notify",
      `There are no emails set for ${notifyData.name}`
    );
    return;
  }
  // Backup default look-back window of 30 seconds.
  let timeCheck = `DATEADD(SECOND, -30, getdate()) `;
  // Widen the window according to the notification's configured time unit.
  if (notifyData.timeType === "sec") {
    timeCheck = `DATEADD(SECOND, -${notifyData.checkTime}, getdate()) `;
  } else if (notifyData.timeType === "min") {
    timeCheck = `DATEADD(MINUTE, -${notifyData.checkTime}, getdate()) `;
  }
  let reprintQuery = `
  SELECT
  IdArtikelvarianten as av,
  ArtikelVariantenBez as alias,
  LfdNr as runningNumber,
  CONVERT(VARCHAR, CAST(Add_Date AS DATETIME), 100) Add_Date,
  Add_User,
  CONVERT(VARCHAR, CAST(Upd_Date AS DATETIME), 100) Upd_Date,
  Upd_User,
  EtikettenDruckerBezeichnung as printer,
  AnzahlGedruckterKopien as totalPrinted
  FROM Alplaprod_test1.dbo.V_EtikettenGedruckt (nolock)
  where AnzahlGedruckterKopien > 2
  and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108)
  and Upd_Date > DATEADD(SECOND, -30, getdate())
  and VpkVorschriftBez not like '%$%'
  `;
  // Swap the default window for the configured one.
  // NOTE(review): the search string ends with a trailing space — confirm
  // the query text actually contains "getdate()) " or the default window
  // is never replaced.
  reprintQuery = reprintQuery.replaceAll(
    "DATEADD(SECOND, -30, getdate()) ",
    timeCheck
  );
  const { data: labels, error: labelError } = await tryCatch(
    query(reprintQuery, "Label Reprints")
  );
  // FIX: labelError was never checked, so a failed query made
  // `labels.length` throw a TypeError instead of being logged.
  if (labelError || !labels) {
    createLog(
      "error",
      "notify",
      "notify",
      `Error from running the Label Reprints query: ${labelError}`
    );
    return;
  }
  if (labels.length === 0) {
    return;
  }
  const emailSetup = {
    emailTo: notifyData.emails,
    subject: "Alert! Label Reprinted",
    template: "reprintLabels",
    context: {
      items: labels,
    },
  };
  const sentEmail = await sendEmail(emailSetup);
  if (!sentEmail.success) {
    createLog(
      "error",
      "notify",
      "notify",
      "Failed to send email, will try again on next interval"
    );
    return;
  }
  // Stamp lastRan and remember the newest label-history id we reported.
  // NOTE(review): the query above does not select IdEtikettenHistorie, so
  // prodID is likely stored as undefined — confirm the intended column.
  const updateSettings = notifyData.notifiySettings;
  const { error } = await tryCatch(
    db
      .update(notifications)
      .set({
        lastRan: sql`NOW()`,
        notifiySettings: {
          ...updateSettings,
          prodID: labels[0].IdEtikettenHistorie,
        },
      })
      .where(eq(notifications.name, notifyData.name))
  );
  if (error) {
    // FIX: the update error was silently discarded before.
    createLog(
      "error",
      "notify",
      "notify",
      `Failed to update ${notifyData.name} settings: ${error}`
    );
  }
};
export default notification;

View File

@@ -0,0 +1,129 @@
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
import { isWeekend } from "date-fns";
import { createLog } from "../../../logger/logger.js";
import { sendEmail } from "../sendMail.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
export interface PPOO {
  // Warehouse position id (the `IdPosition` column of the query result);
  // optional because rows are typed loosely.
  IdPosition?: number;
}
/**
 * Staging-aging monitor (name kept for backward compatibility with the
 * label-reprint file it was copied from): emails when pallets have sat in
 * the configured staging locations longer than `notifyData.checkTime`
 * hours. Optionally stays quiet on weekends. On success, stamps lastRan
 * and stores the reported count in notifiySettings.
 *
 * @param notifyData - full notification row (emails, checkTime, timeType,
 *                     notifiySettings.{locations,weekend}).
 */
export default async function reprintLabelMonitor(notifyData: any) {
  createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
  // Without recipients there is nothing useful to do.
  if (notifyData.emails === "") {
    // FIX: message used notifyData.notificationName, which does not exist
    // on the row (every other reference here uses notifyData.name), so the
    // log always said "for undefined".
    createLog(
      "error",
      "notify",
      "notify",
      `There are no emails set for ${notifyData.name}`
    );
    return;
  }
  // This one goes to managers, so optionally do not send on weekends.
  const weekend = isWeekend(new Date(Date.now()));
  if (weekend && notifyData.notifiySettings.weekend) {
    createLog(
      "info",
      "notify",
      "notify",
      `${notifyData.name} will not run on the weekends`
    );
    return;
  }
  let noteQuery = `
  SELECT
  --[EinlagerungsDatummin] as lastMovingDate,
  round(VerfuegbareMengeVPKSum,2) as pallets
  ,VerfuegbareMengeSum as total
  ,round([GesperrteMengeVpkSum],2) as held
  ,round([GesperrteMengeSum],2) as heldQty
  ,[IdArtikelVarianten] as av
  ,[IdProdBereich] as pfcID
  ,[ArtikelVariantenBez] as articleDescription
  ,[ArtikelVariantenAlias] as articleDescriptionAlias
  ,[LagerAbteilungKurzBez] as location
  ,[Lfdnr] as runningNumber
  ,[Produktionslos] as lot
  ,[ProduktionsDatumMin] as productionDate
  ,IdPosition
  FROM [AlplaPROD_test1].[dbo].[V_LagerPositionenBarcodes] (nolock)
  where idlagerabteilung in ([locations]) and [ProduktionsDatumMin] < DATEadd( Hour, -[timeCheck], getdate())
  order by [ProduktionsDatumMin] asc
  `;
  // Inject the hour threshold and location list into the query template.
  noteQuery = noteQuery
    .replaceAll("[timeCheck]", notifyData.checkTime)
    .replaceAll("[locations]", notifyData.notifiySettings.locations);
  let stage: PPOO[];
  try {
    stage = await query(noteQuery, "Staging checks");
    if (stage.length === 0) {
      return;
    }
    const emailSetup = {
      emailTo: notifyData.emails,
      subject: `Alert! Pallets in staging greater than ${notifyData.checkTime} ${notifyData.timeType}`,
      template: "stagingCheck",
      context: {
        items: stage,
        count: stage.length,
        checkTime: notifyData.checkTime,
        timeCheck: notifyData.timeType,
      },
    };
    const sentEmail = await sendEmail(emailSetup);
    if (!sentEmail.success) {
      createLog(
        "error",
        "notify",
        "notify",
        "Failed to send email, will try again on next interval"
      );
      return;
    }
    // Stamp lastRan and remember how many rows were reported.
    const updateSettings = notifyData.notifiySettings;
    const { error } = await tryCatch(
      db
        .update(notifications)
        .set({
          lastRan: sql`NOW()`,
          notifiySettings: {
            ...updateSettings,
            count: stage.length,
          },
        })
        .where(eq(notifications.name, notifyData.name))
    );
    if (error) {
      // FIX: the update error was silently discarded before.
      createLog(
        "error",
        "notify",
        "notify",
        `Failed to update ${notifyData.name} settings: ${error}`
      );
    }
  } catch (err) {
    // FIX: message said "Label Reprints query" — copy-paste leftover.
    createLog(
      "error",
      "notify",
      "notify",
      `Error from running the staging checks query: ${err}`
    );
  }
}

View File

@@ -0,0 +1,201 @@
// MercuryGate "ImportWeb" request template for TI (Transportation Insight).
// Bracketed tokens such as [requestID], [WebImportHeader], [loadNumber],
// [shipNumber], [customerAccountNum], [items], [multieReleaseNumber] are
// substituted with real values by tiImport via String.replaceAll before
// the payload is posted.
//
// FIX: removed a stray literal "6" line that sat between <EquipmentList/>
// and <Dates>, and a JS-style "// get this from the price sheet" comment
// that was embedded as XML text content — both were being sent to
// MercuryGate inside the request body.
//
// NOTE(review): tiImport builds a [priceSheet] replacement, but this
// template contains no [priceSheet] token (the <PriceSheets> blocks below
// are hard-coded) — confirm whether one of them should be the placeholder.
export let xmlPayloadTI = `
<service-request>
<service-id>ImportWeb</service-id>
<request-id>[requestID]</request-id>
<data>
<WebImport>
[WebImportHeader]
<WebImportFile>
<MercuryGate>
<Header>
<SenderID/>
<ReceiverID/>
<DocTypeID>MasterBillOfLading</DocTypeID>
<DocCount>1</DocCount>
</Header>
<Load action="UpdateOrAdd">
<Enterprise name="" customerAcctNum="[customerAccountNum]"/>
<AssignedTo/>
<ReferenceNumbers>
<ReferenceNumber type="Load Number" isPrimary="true">[loadNumber]</ReferenceNumber>
</ReferenceNumbers>
<Payment>
<Method>Prepaid</Method>
<BillTo thirdParty="False">
<Address Type="BillTo" isResidential="False">
<Alias/>
<Name>ALPLA</Name>
<AddrLine1>CO TRANSPORTATION INSIGHT</AddrLine1>
<AddrLine2>PO BOX 23000</AddrLine2>
<City>HICKORY</City>
<StateProvince>NC</StateProvince>
<PostalCode>28603</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts/>
</Address>
</BillTo>
</Payment>
<PriceSheets>
<PriceSheet type="Carrier" isSelected="false">
<ContractId/>
<SCAC/>
<Mode/>
</PriceSheet>
</PriceSheets>
<Plan>
<Events count="2">
<Event type="Pickup" sequenceNum="1">
<Dates>
<Date type="earliest">[loadingDate]</Date>
<Date type="latest">[deliveryDate]</Date>
</Dates>
<Address type="" isResidential="" isPrimary="false">
<LocationCode/>
<Name>[plantName]</Name>
<AddrLine1>[plantStreetAddress]</AddrLine1>
<AddrLine2/>
<City>[plantCity]</City>
<StateProvince>[plantState]</StateProvince>
<PostalCode>[plantZipCode]</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts>
<Contact type="">
<Name/>
<ContactMethods>
<ContactMethod sequenceNum="1" type="phone">[contactNum]</ContactMethod>
<ContactMethod sequenceNum="1" type="email">[contactEmail]</ContactMethod>
</ContactMethods>
</Contact>
</Contacts>
</Address>
<Shipments>
<ReferenceNumbers>
<ReferenceNumber type="Shipment Number" isPrimary="true">[shipNumber]</ReferenceNumber>
</ReferenceNumbers>
</Shipments>
</Event>
<Event type="Drop" sequenceNum="2">
<Dates>
<Date type="earliest">[loadingDate]</Date>
<Date type="latest">[deliveryDate]</Date>
</Dates>
<Address type="" isResidential="" isPrimary="false">
<LocationCode/>
<Name>[customerName]</Name>
<AddrLine1>[customerStreetAddress]</AddrLine1>
<AddrLine2/>
<City>[customerCity]</City>
<StateProvince>[customerState]</StateProvince>
<PostalCode>[customerZip]</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts>
<Contact type="">
<Name/>
<ContactMethods>
<ContactMethod sequenceNum="1" type="phone">800-555-1122</ContactMethod>
</ContactMethods>
</Contact>
</Contacts>
</Address>
<Shipments>
<ReferenceNumbers>
<ReferenceNumber type="Shipment Number" isPrimary="true">[shipNumber]</ReferenceNumber>
</ReferenceNumbers>
</Shipments>
</Event>
</Events>
</Plan>
<Shipments>
<Shipment type="Regular" action="UpdateOrAdd">
<Status>Pending</Status>
<Enterprise name="" customerAcctNum="[customerAccountNum]"/>
<ReferenceNumbers>
<ReferenceNumber type="Shipment Number" isPrimary="true">[shipNumber]</ReferenceNumber>
<ReferenceNumber type="PO Number" isPrimary="false">[customerPO]</ReferenceNumber>
[multieReleaseNumber]
<ReferenceNumber type="Store Number" isPrimary="false">[glCoding]</ReferenceNumber>
<ReferenceNumber type="Profit Center" isPrimary="false">[pfc]</ReferenceNumber>
</ReferenceNumbers>
<Services/>
<EquipmentList/>
<Dates>
<Pickup>
<Date type="earliest">[loadingDate]</Date>
<Date type="latest">[loadingDate]</Date>
</Pickup>
<Drop>
<Date type="earliest">[deliveryDate]</Date>
<Date type="latest">[deliveryDate]</Date>
</Drop>
</Dates>
<PriceSheets>
<PriceSheet type="Carrier" isSelected="false">
<ContractId/>
<SCAC/>
<Mode/>
</PriceSheet>
</PriceSheets>
<Shipper>
<Address type="" isResidential="" isPrimary="false">
<LocationCode/>
<Name>[plantName]</Name>
<AddrLine1>[plantStreetAddress]</AddrLine1>
<AddrLine2/>
<City>[plantCity]</City>
<StateProvince>[plantState]</StateProvince>
<PostalCode>[plantZipCode]</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts>
<Contact type="">
<Name/>
<ContactMethods>
<ContactMethod sequenceNum="1" type="phone">[contactNum]</ContactMethod>
</ContactMethods>
</Contact>
</Contacts>
</Address>
</Shipper>
<Consignee>
<Address type="" isResidential="" isPrimary="false">
<LocationCode/>
<Name>[customer]</Name>
<AddrLine1>[customerStreetAddress]</AddrLine1>
<AddrLine2/>
<City>[customerCity]</City>
<StateProvince>[customerState]</StateProvince>
<PostalCode>[customerZip]</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts />
</Address>
</Consignee>
<ItemGroups>
[items]
</ItemGroups>
<Payment>
<Method>Prepaid</Method>
<BillTo thirdParty="False">
<Address Type="BillTo" isResidential="False">
<Alias/>
<Name>ALPLA</Name>
<AddrLine1>CO TRANSPORTATION INSIGHT</AddrLine1>
<AddrLine2>PO BOX 23000</AddrLine2>
<City>HICKORY</City>
<StateProvince>NC</StateProvince>
<PostalCode>28603</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts/>
</Address>
</BillTo>
</Payment>
</Shipment>
</Shipments>
</Load>
</MercuryGate>
</WebImportFile>
</WebImport>
</data>
</service-request>
`;

View File

@@ -0,0 +1,424 @@
import { xmlPayloadTI } from "./tiFullFlow/tiXmlPayload.js";
import axios from "axios";
import querystring from "querystring";
import { getOrderToSend } from "../../../sqlServer/querys/notifications/ti/getOrderToSend.js";
import { getHeaders } from "../../../sqlServer/querys/notifications/ti/getHeaders.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { db } from "../../../../../database/dbclient.js";
import { settings } from "../../../../../database/schema/settings.js";
import { serverData } from "../../../../../database/schema/serverData.js";
import { eq, sql } from "drizzle-orm";
import { notifications } from "../../../../../database/schema/notifications.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { createLog } from "../../../logger/logger.js";
import { freightClass } from "../../../../globalUtils/freightClass.js";
import { delay } from "../../../../globalUtils/delay.js";
// Format any date-like value as "MM/DD/YYYY HH:MM:SS" in UTC using a
// 24-hour clock; this is the timestamp shape the TI payload expects.
const dateCorrection = (newDate: any) => {
  const asDate = new Date(newDate);
  const stamped = asDate.toLocaleString("en-US", {
    timeZone: "UTC",
    year: "numeric",
    month: "2-digit",
    day: "2-digit",
    hour: "2-digit",
    minute: "2-digit",
    second: "2-digit",
    hourCycle: "h23", // Ensures 24-hour format
  });
  // en-US inserts "," between date and time; strip it to leave one space.
  return stamped.replace(",", "");
};
/**
 * Send the oldest pending release to TI (Transportation Insight /
 * MercuryGate) as an ImportWeb XML request, then record the release number
 * in the notification's processed list so it is not sent again.
 *
 * Processes at most ONE release per call; runTiImport() loops this until
 * it returns code 1 ("nothing pending").
 *
 * Returns { success, code, message }.
 * NOTE(review): the early `return;` branches below return undefined, while
 * runTiImport() reads `.code` off the result — confirm those paths cannot
 * be reached from the loop, or they will throw a TypeError there.
 */
const tiImport = async () => {
  //await initializePool();
  // get the plant token
  const { data: plantData, error: plantError } = await tryCatch(
    db.select().from(settings)
  );
  //await initializePool();
  if (plantError) return;
  const plantToken = plantData?.filter((n) => n.name === "plantToken");
  // Plant/server record keyed by the plant token (address, shipping hours...).
  const { data: plantInfo, error: plantEr } = await tryCatch(
    db
      .select()
      .from(serverData)
      .where(eq(serverData.plantToken, plantToken[0].value))
  );
  // parsing posting window
  // NOTE(review): postTime is parsed here but never used in this function.
  const plantI = plantInfo!;
  const postTime = JSON.parse(plantI[0]?.tiPostTime!);
  // order notifications
  const { data: notificationSet, error: notificationSettingsErr } =
    await tryCatch(
      db
        .select()
        .from(notifications)
        .where(eq(notifications.name, "tiIntergration"))
    );
  if (notificationSettingsErr) return;
  const notiSet: any = notificationSet;
  //creds
  // FIXME(security): hard-coded test credentials — move to configuration /
  // secret storage before production use.
  const userid = "ALPLAWSTEST";
  const password = "oe39U1LuLX9ZdY0XKobG";
  // const requestID = `ALPLAPBTEST1`; // production will be alpla01-dateTime - this will be the time it was sent over.
  const requestUser = "ALPLAWSTEST"; // if alplaprod_rs -- confirm we can use a user name vs the AlplapIMPORT // needs to stay the same as provied
  const customerAccountNum = plantI[0].customerTiAcc as string; // ti
  // it we dont get anything here we want to make sure we add it in
  // get current releaes not in the already sent oders
  let orders = getHeaders;
  // NOTE(review): notiSet is the row ARRAY returned by the select above,
  // yet `notifiySettings` is read off the array itself rather than
  // notiSet[0] — confirm this is intentional; notiSet[0].notifiySettings
  // looks like what was meant.
  orders = orders
    .replaceAll("test1", plantToken[0].value)
    .replaceAll("[from]", notiSet?.notifiySettings.start)
    .replaceAll("[to]", notiSet?.notifiySettings.end)
    .replaceAll(
      "[exclude]",
      notiSet.notifiySettings.processed
        .map((num: any) => `'${num}'`)
        .join(", ")
    );
  //console.log(orders);
  let headerPending = [];
  try {
    headerPending = await query(orders, "Ti get open headers");
  } catch (error) {
    console.log(error);
  }
  // Nothing pending: report code 1 so the caller's loop stops.
  if (headerPending.length === 0) {
    createLog(
      "info",
      "notification",
      "notify",
      "There are no pending orders to be sent over to ti."
    );
    return {
      success: true,
      code: 1,
      message: "There are no pending orders to be sent over to ti.",
    };
  }
  createLog(
    "info",
    "notification",
    "notify",
    `There are a total of ${headerPending.length} to send over`
  );
  // update query to have the correct plant token
  let orderToSend = getOrderToSend;
  orderToSend = orderToSend
    .replaceAll("test1", plantToken[0].value)
    .replaceAll("[releaseToProcess]", `'${headerPending[0].releaseNumber}'`)
    .replaceAll("[from]", notiSet.notifiySettings.start)
    .replaceAll("[to]", notiSet.notifiySettings.end);
  // console.log(orderToSend);
  let records = [];
  try {
    records = await query(orderToSend, "Ti send order");
  } catch (error) {
    console.log(error);
  }
  //console.log(headerPending.length);
  // update the header
  // NOTE(review): this fragment repeats <request-id>/<data>/<WebImport>,
  // which xmlPayloadTI already contains around its [WebImportHeader] slot,
  // and xmlPayloadTI's own [requestID] token is never replaced — confirm
  // the substituted XML against a known-good request.
  let webHeader = `
  <request-id>[requestID]</request-id>
  <data>
  <WebImport>
  <WebImportHeader>
  <FileName>[requestID].XML</FileName>
  <Type>SOTransportLoader</Type>
  <UserName>[requestUser]</UserName>
  </WebImportHeader>
  `;
  webHeader = webHeader.replaceAll(
    "[requestID]",
    `${records[0].releaseNumber}-${plantToken[0].value}`
  );
  webHeader = webHeader.replaceAll("[requestUser]", requestUser);
  // this part will link into the <ItemGroups></ItemGroups>
  // Build one <ItemGroup> per record; package dimensions are converted from
  // millimetres to inches (/ 25.4), weights stay in KG.
  let itemGroups = "";
  for (let i = 0; i < records.length; i++) {
    let newItem = `
  <ItemGroup id="" isShipUnit="false" isHandlingUnit="false" sequence="${
    i + 1
  }">
  <ContainedBy id=""/>
  <LineItem lineNumber="${i + 1}"/>
  <Dimensions>
  <Dimension type="Length" uom="IN">${(
    records[i].pkgLengh / 25.4
  ).toFixed(2)}</Dimension>
  <Dimension type="Width" uom="IN">${(
    records[i].pkgWidth / 25.4
  ).toFixed(2)}</Dimension>
  <Dimension type="Height" uom="IN">${Math.round(
    records[i].pkgHeight / 25.4
  ).toFixed(2)}</Dimension>
  </Dimensions>
  <Description>${`av ${records[i].article} ${records[i].articleAlias}`}</Description>
  <FreightClasses>
  <FreightClass type="">${freightClass(
    records[i].pkgWeight,
    records[i].pkgLengh,
    records[i].pkgWidth,
    records[i].pkgHeight
  )}</FreightClass>
  </FreightClasses>
  <Commodity/>
  <NmfcCode/>
  <HazardousMaterial>false</HazardousMaterial>
  <HazMatDetail/>
  <Weights>
  <Weight type="actual" uom="KG">${
    records[i].pkgWeight * records[i].Pallets
  }</Weight>
  </Weights>
  <Quantities>
  <Quantity type="actual" uom="pallet">${
    records[i].Pallets
  }</Quantity>
  </Quantities>
  </ItemGroup>
  `;
    itemGroups += newItem;
  }
  // add the full amount of pallets sending over
  let fullPalToSend = records.reduce(
    (acc: any, o: any) => acc + o.Pallets,
    0
  );
  // rebuild the xml to be properly
  // NOTE(review): several replaceAll targets here ([fullTotalPal],
  // [shippingHoursEarly]/[shippingHoursLate], [specialInstructions],
  // [priceSheet]) do not appear in xmlPayloadTI — confirm the template and
  // this substitution list are in sync.
  let payload = xmlPayloadTI;
  payload = payload
    .replaceAll(`[WebImportHeader]`, webHeader)
    .replaceAll(`[items]`, itemGroups)
    .replaceAll(`[customerAccountNum]`, customerAccountNum)
    .replaceAll("[fullTotalPal]", fullPalToSend);
  // update the main release
  //[loadNumber],[shipNumber]
  payload = payload.replaceAll(`[shipNumber]`, records[0].releaseNumber);
  payload = payload.replaceAll(`[loadNumber]`, records[0].releaseNumber);
  // do the multie release if needed
  // <ReferenceNumber type="Release Number" isPrimary="false">[multieReleaseNumber]</ReferenceNumber>
  let multiRelease = ``;
  if (records.length > 0) {
    for (let i = 0; i < records.length; i++) {
      const newRelease = `
  <ReferenceNumber type="Release Number" isPrimary="false">${records[i].releaseNumber}</ReferenceNumber>`;
      multiRelease += newRelease;
    }
    payload = payload.replaceAll("[multieReleaseNumber]", multiRelease);
  } else {
    payload = payload.replaceAll("[multieReleaseNumber]", "");
  }
  //update the delivery section
  payload = payload.replaceAll(
    "[loadingDate]",
    dateCorrection(records[0].LoadingDate)
  );
  payload = payload.replaceAll(
    "[deliveryDate]",
    dateCorrection(records[0].DeliveryDate)
  );
  // shipping hours
  //<Date type="earliest">[shippingHoursEarly]</Date>
  //<Date type="latest">[shippingHoursLate]</Date>
  // update teh shipping hours
  const now = new Date();
  const formattedDate = records[0].LoadingDate.toLocaleDateString("en-US", {
    month: "2-digit",
    day: "2-digit",
    year: "numeric",
  });
  const shippingHours = JSON.parse(plantI[0]?.shippingHours!);
  //console.log(shippingHours);
  payload = payload
    .replaceAll(
      "[shippingHoursEarly]",
      `${formattedDate} ${shippingHours[0].early}`
    )
    .replaceAll(
      "[shippingHoursLate]",
      `${formattedDate} ${shippingHours[0].late}`
    );
  // Plant (shipper) and customer (consignee) address substitutions.
  payload = payload
    .replaceAll("[plantName]", `Alpla ${plantI[0]?.sName!}`)
    .replaceAll("[plantStreetAddress]", plantI[0]?.streetAddress!)
    .replaceAll("[plantCity]", plantI[0]?.cityState!.split(",")[0])
    .replaceAll("[plantState]", plantI[0]?.cityState!.split(",")[1])
    .replaceAll("[plantZipCode]", plantI[0]?.zipcode!)
    .replaceAll("[contactNum]", plantI[0]?.contactPhone!)
    .replaceAll("[contactEmail]", plantI[0]?.contactEmail!)
    // customer info
    .replaceAll("[customerName]", records[0].addressAlias)
    .replaceAll("[customerStreetAddress]", records[0].streetAddress)
    .replaceAll("[customerCity]", records[0].city.split(",")[0])
    .replaceAll("[customerState]", records[0].city.split(",")[1])
    .replaceAll("[customerZip]", records[0].zipCode)
    .replaceAll("[customerPO]", records[0].Header)
    .replaceAll(
      "[glCoding]",
      `52410-${
        records[0].artileType.toLowerCase() === "preform" ||
        records[0].artileType.toLowerCase() === "metalCage"
          ? 31
          : plantI[0].greatPlainsPlantCode
      }`
    ) // {"52410 - " + (artileType.toLowerCase() === "preform" || artileType.toLowerCase() === "metalCage" ? 31: plantInfo[0].greatPlainsPlantCode)}
    .replaceAll(
      "[pfc]",
      `${
        records[0].artileType.toLowerCase() === "preform" ||
        records[0].artileType.toLowerCase() === "metalCage"
          ? 40
          : records[0].costCenter
      }`
    )
    // special instructions
    .replaceAll(
      "[specialInstructions]",
      `This is a FTL load. The driver will need 2 adjustable load locks to secure the load. The driver will not be loaded without them. Please reference ALPLA pickup ${records[0].Header}`
    );
  // update the carrier info if any is needed.
  // check the address has a real carrier on it and change to true and put the sacs code in
  // NOTE(review): hasCarrier is assigned but never used; the SCAC decision
  // below reads records[0].remark directly.
  const hasCarrier = true;
  console.log(
    `Checking if ${records[0].addressAlias} has scac: ${
      records[0].remark.split(",")[0] ? "there was one" : "no scac"
    }`
  );
  // Carrier price sheet: if the first comma-separated token of `remark`
  // exists, it is treated as "something:SCAC" and marked selected.
  const priceSheet = `
  <PriceSheets>
  <PriceSheet type="Carrier" isSelected="${
    records[0].remark.split(",")[0] ? "true" : "false"
  }">
  <ContractId/>
  ${
    records[0].remark.split(",")[0]
      ? `<SCAC>${records[0].remark
          .split(",")[0]
          .split(":")[1]
          .toUpperCase()}</SCAC>`
      : `<SCAC/>`
  }
  <Mode/>
  </PriceSheet>
  </PriceSheets>
  `;
  payload = payload.replaceAll("[priceSheet]", priceSheet);
  // console.log(payload);
  //await closePool();
  //put the xml into a form
  const formBody = querystring.stringify({
    userid,
    password,
    request: payload,
  });
  // NOTE(review): this POST is intentionally not awaited — the
  // processed-list update below runs regardless of whether the send
  // succeeded; confirm fire-and-forget is the desired behavior.
  axios
    .post(
      "https://t-insightws.mercurygate.net/MercuryGate/common/remoteService.jsp",
      formBody,
      {
        headers: {
          "Content-Type": "application/x-www-form-urlencoded",
        },
      }
    )
    .then((response) => {
      //console.log(response.data)
      console.log("Data was sent over to TI");
    })
    .catch((error) => console.error(error));
  // console.log(payload);
  // the order is done so we want to update the processed.
  // add the new processed order to this
  // NOTE(review): notiSettingArray is indexed as an array here
  // (notiSettingArray[0].processed) but spread into an OBJECT in the update
  // below, which yields numeric keys — confirm the stored shape of
  // notifiySettings matches what the readers above expect.
  let notiSettingArray = notiSet.notifiySettings;
  if (
    !notiSettingArray[0].processed.includes(headerPending[0].releaseNumber)
  ) {
    notiSettingArray[0].processed.push(headerPending[0].releaseNumber);
  }
  const { data, error } = await tryCatch(
    db
      .update(notifications)
      .set({
        lastRan: sql`NOW()`,
        notifiySettings: {
          ...notiSettingArray,
          prodID: 1,
        },
      })
      .where(eq(notifications.name, "tiIntergration"))
  );
  createLog("info", "ti", "notify", "done with this order");
  return { success: true, code: 0, message: "done with this order" };
};
// add a running check so we cant flag it twice
export let tiExportRunning = false;
export const runTiImport = async () => {
  /**
   * Repeatedly runs tiImport until it reports code 1 (nothing left to
   * process), pausing 5 seconds between orders. Maintains the module-level
   * tiExportRunning flag so callers can refuse to start a second export.
   */
  tiExportRunning = true;
  try {
    let result: any;
    do {
      createLog("info", "ti", "notify", "processing new data");
      result = await tiImport();
      createLog(
        "info",
        "ti",
        "notify",
        `Still more to process? ${result.code === 1 ? "No" : "Yes"}`
      );
      if (result.code === 1) {
        // nothing left to process — no need for the trailing delay
        break;
      }
      // small pause between orders so the downstream systems are not hammered
      await delay(1000 * 5);
    } while (true);
  } finally {
    // always clear the flag, even when tiImport throws, otherwise the export
    // is stuck reporting "running" and can never be triggered again
    tiExportRunning = false;
  }
};
export default tiImport;

View File

@@ -1,9 +1,17 @@
import { OpenAPIHono } from "@hono/zod-openapi"; import { OpenAPIHono } from "@hono/zod-openapi";
import sendemail from "./routes/sendMail.js"; import sendemail from "./routes/sendMail.js";
import { tryCatch } from "../../globalUtils/tryCatch.js";
import { db } from "../../../database/dbclient.js";
import { notifications } from "../../../database/schema/notifications.js";
import { createLog } from "../logger/logger.js";
import { note, notificationCreate } from "./utils/masterNotifications.js";
import { startNotificationMonitor } from "./utils/processNotifications.js";
import notifyStats from "./routes/getActiveNotifications.js";
const app = new OpenAPIHono(); const app = new OpenAPIHono();
const routes = [sendemail] as const; const routes = [sendemail, notifyStats] as const;
const appRoutes = routes.forEach((route) => { const appRoutes = routes.forEach((route) => {
app.route("/notify", route); app.route("/notify", route);
@@ -16,4 +24,37 @@ app.all("/notify/*", (c) => {
}); });
}); });
// check if the mastNotications is changed compared to the db and add if needed.
const { data: notes, error: notesError } = await tryCatch(
db.select().from(notifications)
);
if (notesError) {
createLog(
"error",
"notify",
"notify",
`There was an error getting the notifications: ${JSON.stringify(
notesError
)}`
);
}
if (note.length != notes?.length) {
notificationCreate();
createLog("info", "notify", "notify", `New notifcations being added.`);
setTimeout(() => {
startNotificationMonitor();
}, 5 * 1000);
} else {
createLog(
"info",
"notify",
"notify",
`There are know new notifcations. no need to run the update. reminder all changes happen per server.`
);
setTimeout(() => {
startNotificationMonitor();
}, 5 * 1000);
}
export default app; export default app;

View File

@@ -0,0 +1,29 @@
// an external way to creating logs
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { getAllJobs } from "../utils/processNotifications.js";
const app = new OpenAPIHono({ strict: false });

/** GET /activenotifications — reports every Cron job currently scheduled. */
const activeNotificationsRoute = createRoute({
  tags: ["server"],
  summary: "Returns current active notifications.",
  method: "get",
  path: "/activenotifications",
  //middleware: authMiddleware,
  responses: responses(),
});

app.openapi(activeNotificationsRoute, async (c) => {
  // getAllJobs snapshots the in-memory Cron registry
  const jobs = getAllJobs();
  const message =
    jobs.length === 0
      ? "There are no active Notifications Currently."
      : "Current Active notifications";
  return c.json({ success: true, message, data: jobs });
});

export default app;

View File

@@ -0,0 +1,22 @@
// import {Router} from "express";
// import {tiExportRunning, runTiImport} from "../../notification/notification/tiFullFlow/tiImports.js";
// const router = Router();
// router.get("/tiTrigger", async (req, res): Promise<void> => {
// if (tiExportRunning) {
// res.status(200).json({
// success: false,
// message: "There is already a current sesion of the Export running please try again later.",
// });
// }
// // trigger the import
// runTiImport();
// res.status(200).json({
// success: true,
// message: "The Ti Export has been manually started and will continue to run in the background.",
// });
// });
// export default router;

View File

@@ -0,0 +1,113 @@
import { db } from "../../../../database/dbclient.js";
import { notifications } from "../../../../database/schema/notifications.js";
import { createLog } from "../../logger/logger.js";
// Master list of notifications seeded into the DB on startup.
// Each entry's `name` must match a controller module at
// controller/notifications/<name>.js, which the monitor imports dynamically.
// NOTE(review): several strings contain typos ("withing", "recommened",
// "seperated") and the name "tiIntergration" is misspelled, but these are
// runtime data — the name is matched elsewhere in the codebase — so they are
// deliberately left untouched here.
export const note: any = [
  {
    // flags labels reprinted within the configured window
    name: "reprintLabels",
    description:
      "Monitors the labels that are printed and returns a value if one falls withing the time frame defined below.",
    checkInterval: 1,
    timeType: "min",
    emails: "",
    active: false,
    notifiySettings: { prodID: 1 },
  },
  {
    // long-downtime watcher; duration threshold lives in notifiySettings
    name: "downTimeCheck",
    description:
      "Checks for specific downtimes that are greater than 105 min.",
    checkInterval: 30,
    timeType: "min",
    emails: "",
    active: false,
    notifiySettings: { prodID: 1, daysInPast: 5, duration: 105 },
  },
  {
    // quality blocking orders; sentBlockingOrders tracks already-notified ids
    name: "qualityBlocking",
    description:
      "Checks for new blocking orders that have been entered, recommened to get the most recent order in here before activating.",
    checkInterval: 30,
    timeType: "min",
    emails: "",
    active: false,
    notifiySettings: { prodID: 1, sentBlockingOrders: [1] },
  },
  {
    // pallets aging in ppoo
    name: "productionCheck",
    description: "Checks ppoo",
    checkInterval: 2,
    timeType: "hour",
    emails: "",
    active: false,
    notifiySettings: {
      prodID: 1,
      count: 0,
      weekend: false,
      locations: "0",
    },
  },
  {
    // pallets aging in staging; locations is a comma-separated list
    name: "stagingCheck",
    description:
      "Checks staging based on locations, locations need to be seperated by a ,",
    checkInterval: 2,
    timeType: "hour",
    emails: "",
    active: false,
    notifiySettings: {
      prodID: 1,
      count: 0,
      weekend: false,
      locations: "0",
    },
  },
  {
    // TI integration export; `releases` holds processed release numbers
    name: "tiIntergration",
    description: "Checks for new releases to be put into ti",
    checkInterval: 2,
    timeType: "hour",
    emails: "",
    active: false,
    notifiySettings: {
      prodID: 1,
      start: 36,
      end: 720,
      releases: [1, 2, 3],
    },
  },
  {
    // sample entry (active by default) — presumably for testing; confirm
    // before shipping to production
    name: "exampleNotification",
    description: "Checks for new releases to be put into ti",
    checkInterval: 2,
    timeType: "min",
    emails: "",
    active: true,
    notifiySettings: {
      prodID: 1,
      start: 36,
      end: 720,
      releases: [1, 2, 3],
    },
  },
];
/**
 * Seeds the notifications table with the master list defined in `note`.
 * Each row is inserted with onConflictDoNothing, so entries that already
 * exist in the table are left untouched. Insert failures are logged per
 * entry and do not abort the rest of the seeding loop.
 */
export const notificationCreate = async () => {
  for (const entry of note) {
    try {
      await db.insert(notifications).values(entry).onConflictDoNothing();
    } catch (error) {
      // log which entry failed; the old message claimed "getting" when this
      // loop inserts, which made failures confusing to trace
      createLog(
        "error",
        "notify",
        "notify",
        `There was an error creating notification "${
          entry.name
        }": ${JSON.stringify(error)}`
      );
    }
  }
};

View File

@@ -0,0 +1,158 @@
import { db } from "../../../../database/dbclient.js";
import { notifications } from "../../../../database/schema/notifications.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { Cron } from "croner";
// Store active timeouts by notification ID
export let runningNotifications: Record<string, Cron> = {};
/**
 * Polls the notifications table every 5 seconds and reconciles the in-memory
 * Cron registry with it: deactivated rows get their job stopped/removed,
 * newly-active rows (with emails configured) get a job scheduled at their
 * configured interval.
 */
export const startNotificationMonitor = async () => {
  // if restarted or crashed we need to make sure the running notifications is cleared
  createLog("info", "notify", "notify", `Notification system is now active.`);
  setInterval(async () => {
    const { data, error } = await tryCatch(db.select().from(notifications));
    if (error) {
      createLog(
        "error",
        "notify",
        "notify",
        `There was an error getting the notifications: ${JSON.stringify(
          error
        )}`
      );
      // without data there is nothing to reconcile this tick; previously this
      // fell through and crashed iterating a null result
      return;
    }
    const notes: any = data;
    for (const note of notes) {
      // if we get deactivated remove it.
      if (runningNotifications[note.name] && !note.active) {
        createLog(
          "info",
          "notify",
          "notify",
          `${note.name} was just deactivated`
        );
        removeNotification(note.name);
      }
      // if we are not active, no emails, and already in place just stop.
      if (
        !note.active ||
        note.emails === "" ||
        runningNotifications[note.name]
      ) {
        continue;
      }
      let time = `*/30 * * * *`; // default to be every 30 min
      if (note.timeType === "min") {
        time = `*/${note.checkInterval} * * * *`;
      }
      if (note.timeType === "hour") {
        // run at minute 0 of every Nth hour; the previous pattern
        // `* */N * * *` fired once a minute during every matching hour
        time = `0 */${note.checkInterval} * * *`;
      }
      createJob(note.name, time, async () => {
        try {
          // each notification has a controller module named after it
          const { default: runFun } = await import(
            `../controller/notifications/${note.name}.js`
          );
          await runFun(note);
        } catch (error: any) {
          createLog(
            "error",
            "notify",
            note.name,
            `Error running notification: ${error.message}`
          );
        }
      });
    }
  }, 5 * 1000);
};
/** Registers (or replaces) a Croner job under the given id in the registry. */
const createJob = (id: string, schedule: string, task: () => Promise<void>) => {
  // Replace any job already registered under this id.
  const existing = runningNotifications[id];
  if (existing) {
    existing.stop(); // Croner uses .stop() instead of .destroy()
  }
  // Create new job with Croner
  runningNotifications[id] = new Cron(
    schedule,
    {
      timezone: "America/Chicago",
      catch: true, // Prevents unhandled rejections
    },
    task
  );
  // Optional: Add error handling (Croner emits 'error' events)
  // runningNotifications[id].on("error", (err) => {
  //   console.error(`Job ${id} failed:`, err);
  // });
};
/** Shape returned by getAllJobs for each scheduled notification job. */
interface JobInfo {
  id: string;
  schedule: string;
  nextRun: Date | null;
  // previous run time; this field was missing even though getAllJobs
  // returns it, which fails TypeScript's excess-property check
  lastRun: Date | null;
  isRunning: boolean;
}
/**
 * Snapshot of every Cron job currently registered: id, cron pattern, and
 * next/previous run times. Consumed by the /activenotifications route.
 */
export const getAllJobs = (): JobInfo[] => {
  return Object.entries(runningNotifications).map(([id, job]) => ({
    id,
    schedule: job.getPattern() || "invalid",
    nextRun: job.nextRun() || null,
    lastRun: job.previousRun() || null,
    // values produced by Object.entries are never undefined, so the old
    // `job ? ... : false` guard was dead code
    isRunning: !job.isStopped(),
  }));
};
/** Stops and forgets the Cron job registered under the given id, if any. */
const removeNotification = (id: any) => {
  const job = runningNotifications[id];
  if (!job) {
    return;
  }
  job.stop();
  delete runningNotifications[id];
};
/** Stops every registered Cron job and resets the registry. */
export const stopAllJobs = () => {
  for (const job of Object.values(runningNotifications)) {
    (job as any).stop();
  }
  runningNotifications = {}; // Clear the object
};
/*
// Pause a job
app.post("/api/jobs/:id/pause", (req, res) => {
runningNotifications[req.params.id]?.pause();
res.json({ success: true });
});
// Resume a job
app.post("/api/jobs/:id/resume", (req, res) => {
runningNotifications[req.params.id]?.resume();
res.json({ success: true });
});
// Delete a job
app.delete("/api/jobs/:id", (req, res) => {
runningNotifications[req.params.id]?.stop();
delete runningNotifications[req.params.id];
res.json({ success: true });
});
*/

View File

@@ -0,0 +1,46 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
{{> styles}}
</head>
<body>
<p>All,</p>
<p>The below downtimes have exceeded the max requested limit of {{secondarySetting.duration}}min</p>
<table >
<thead>
<tr>
<th>totalDuration</th>
<th>machineAlias</th>
<th>CTO_Code</th>
<th>Downtime_Description</th>
<th>groupDesc</th>
<th>remark</th>
<th>Downtime start</th>
<th>Downtime finish</th>
</tr>
</thead>
<tbody>
{{#each items}}
<tr>
<td>{{totalDuration}}</td>
<td>{{machineAlias}}</td>
<td>{{CTO_Code}}</td>
<td>{{Downtime_Description}}</td>
<td>{{groupDesc}}</td>
<td>{{remark}}</td>
<td>{{dtStart}}</td>
<td>{{dtEnd}}</td>
</tr>
{{/each}}
</tbody>
</table>
<div>
<p>Thank you,</p>
<p>LST Team</p>
</div>
</body>
</html>

View File

@@ -0,0 +1,41 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
{{> styles}}
</head>
<body>
<p>All,</p>
<p>There are currently {{count}} pallets sitting in ppoo that are older than {{checkTime}} {{timeCheck}}.</p>
<table >
<thead>
<tr>
<th>Article</th>
<th>Description</th>
<th>Lot</th>
<th>ProductionDate</th>
<th>Running Number</th>
</tr>
</thead>
<tbody>
{{#each items}}
<tr>
<td>{{av}}</td>
<td>{{articleDescription}}</td>
<td>{{lot}}</td>
<td>{{productionDate}}</td>
<td>{{runningNumber}}</td>
</tr>
{{/each}}
</tbody>
</table>
<div>
<p>Thank you,</p>
<p>LST Team</p>
</div>
</body>
</html>

View File

@@ -0,0 +1,74 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
<style>
.email-wrapper {
max-width: 80%; /* Limit width to 80% of the window */
margin: 0 auto; /* Center the content horizontally */
}
.email-table {
width: 100%;
border-collapse: collapse;
}
.email-table td {
vertical-align: top;
padding: 10px;
border: 1px solid #000;
border-radius: 25px; /* Rounded corners */
background-color: #f0f0f0; /* Optional: Add a background color */
}
.email-table h2 {
margin: 0;
}
.remarks {
border: 1px solid black;
padding: 10px;
background-color: #f0f0f0;
border-radius: 25px;
}
</style>
</head>
<body>
<div class="email-wrapper">
<p>All,</p>
<p>Please see the new blocking order that was created.</p>
{{#each items}}
<div>
<div class="email-table">
<table>
<tr>
<td>
<p><strong>Blocking number: </strong>{{blockingNumber}}</p>
<p><strong>Blocking Date: </strong>{{blockingDate}}</p>
<p><strong>Article: </strong>{{av}}</p>
<p><strong>Production Lot: </strong>{{lotNumber}}</p>
<p><strong>Line: </strong>{{line}}</p>
</td>
<td>
<p><strong>Customer: </strong>{{customer}}</p>
<p><strong>Blocked pieces /LUs: </strong>{{peicesAndLoadingUnits}}</p>
<p><strong>Main defect group: </strong>{{mainDefectGroup}}</p>
<p><strong>Main defect: </strong>{{mainDefect}}</p>
</td>
</tr>
</table>
</div>
</div>
<div class="remarks">
<h4>Remarks:</h4>
<p>{{remark}}</p>
</div>
</div>
{{/each}}
<br>
<p>For further questions please reach out to quality.</p> <br>
<p>Thank you,</p> <br>
    <p>Quality Department</p>
</div>
</body>
</html>

View File

@@ -0,0 +1,50 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
{{> styles}}
</head>
<body>
<p>All,</p>
<p>The below labels have been reprinted.</p>
<table >
<thead>
<tr>
<th>AV</th>
            <th>Description</th>
<th>Label Number</th>
<th>Date Added</th>
<th>User that created</th>
<th>Last time label was printed/updated in the system</th>
<th>Who printed/Updated</th>
<th>What printer it came from</th>
<th>Total reprinted labels</th>
</tr>
</thead>
<tbody>
{{#each items}}
<tr>
<td>{{av}}</td>
<td>{{alias}}</td>
<td>{{runningNumber}}</td>
<td>{{Add_Date}}</td>
<td>{{Add_User}}</td>
<td>{{Upd_Date}}</td>
<td>{{Upd_User}}</td>
<td>{{printer}}</td>
<td>{{totalPrinted}}</td>
</tr>
{{/each}}
</tbody>
</table>
<div>
<p>Thank you,</p>
<p>LST Team</p>
</div>
</body>
</html>

View File

@@ -0,0 +1,41 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
{{> styles}}
</head>
<body>
<p>All,</p>
<p>There are currently {{count}} pallets sitting in staging that are older than {{checkTime}} {{timeCheck}}.</p>
<table >
<thead>
<tr>
<th>Article</th>
<th>Description</th>
<th>Lot</th>
<th>ProductionDate</th>
<th>Running Number</th>
</tr>
</thead>
<tbody>
{{#each items}}
<tr>
<td>{{av}}</td>
<td>{{articleDescription}}</td>
<td>{{lot}}</td>
<td>{{productionDate}}</td>
<td>{{runningNumber}}</td>
</tr>
{{/each}}
</tbody>
</table>
<div>
<p>Thank you,</p>
<p>LST Team</p>
</div>
</body>
</html>

View File

@@ -1,13 +1,17 @@
import type {User} from "../../../types/users.js"; import type { User } from "../../../types/users.js";
import {alplaStockInv} from "./cycleCount/alplaStockInventory.js"; import { alplaStockInv } from "./cycleCount/alplaStockInventory.js";
import {emptyCount} from "./cycleCount/emptyCycleCount.js"; import { emptyCount } from "./cycleCount/emptyCycleCount.js";
import {fullLaneCount} from "./cycleCount/fullLaneCycleCount.js"; import { fullLaneCount } from "./cycleCount/fullLaneCycleCount.js";
import {ocmeInv} from "./cycleCount/ocmeInventory.js"; import { ocmeInv } from "./cycleCount/ocmeInventory.js";
export const prepareLane = "https://usday1prod.alpla.net/application/public/v1.1/Warehousing/PrepareLaneForInventory"; export const prepareLane =
export const openLane = "https://usday1prod.alpla.net/application/public/v1.0/Warehousing/InventoryOpen"; "https://usday1prod.alpla.net/application/public/v1.1/Warehousing/PrepareLaneForInventory";
export const closeLane = "https://usday1prod.alpla.net/application/public/v1.0/Warehousing/InventoryClose"; export const openLane =
export const releaseLane = "https://usday1prod.alpla.net/application/public/v1.1/Warehousing/ReleaseLaneFromInventory"; "https://usday1prod.alpla.net/application/public/v1.0/Warehousing/InventoryOpen";
export const closeLane =
"https://usday1prod.alpla.net/application/public/v1.0/Warehousing/InventoryClose";
export const releaseLane =
"https://usday1prod.alpla.net/application/public/v1.1/Warehousing/ReleaseLaneFromInventory";
export const scannerID = 500; export const scannerID = 500;
export const cycleCount = async (lane: any, user: User) => { export const cycleCount = async (lane: any, user: User) => {
/** /**
@@ -24,29 +28,39 @@ export const cycleCount = async (lane: any, user: User) => {
// create a new array that has the merge happen. // create a new array that has the merge happen.
const mergeOcmeData = ocme.map((d: any) => { const mergeOcmeData = ocme.map((d: any) => {
// check if its in the ocme array we add it // check if its in the ocme array we add it
const inStock = alplaStock.filter((r: any) => r.runningNumber === d.runningNumber); const inStock = alplaStock.filter(
(r: any) => r.runningNumber === d.runningNumber
);
//console.log(inStock); //console.log(inStock);
if (inStock.length != 0) { if (inStock.length != 0) {
//console.log(`${d.runningNumber} is good`); //console.log(`${d.runningNumber} is good`);
return {...d, ocme: "Yes", stock: "Yes", info: "Good"}; return { ...d, ocme: "Yes", stock: "Yes", info: "Good" };
} else { } else {
//console.log(`${d.runningNumber} is bad`); //console.log(`${d.runningNumber} is bad`);
return {...d, ocme: "Yes", stock: "No", info: "Quality Check Required"}; return {
...d,
ocme: "Yes",
stock: "No",
info: "Validate pallet is ok. ",
};
} }
}); });
const mergeStockData = alplaStock const mergeStockData = alplaStock
.filter((r: any) => !ocme.some((d: any) => d.runningNumber === r.runningNumber)) .filter(
.map((r: any) => { (r: any) =>
return { !ocme.some((d: any) => d.runningNumber === r.runningNumber)
...r, )
ocme_laneLevelID: "", .map((r: any) => {
sscc: "", return {
ocme: "No", ...r,
stock: "Yes", ocme_laneLevelID: "",
info: "Sent to Inv", sscc: "",
}; ocme: "No",
}); stock: "Yes",
info: "Sent to Inv",
};
});
const combineBoth = [...mergeOcmeData, ...mergeStockData]; const combineBoth = [...mergeOcmeData, ...mergeStockData];

View File

@@ -1,10 +1,12 @@
// import {Router} from 'express' // import {Router} from 'express'
// import { runQuery } from '../utils/sql/config/sqlConfig.js' // import { runQuery } from '../utils/sql/config/sqlConfig.js'
// import { ocmeInventory } from '../utils/sql/querys/getOCMEInventory.js' // import { ocmeInventory } from '../utils/sql/querys/getOCMEInventory.js'
// import { getLanes } from '../utils/sql/querys/getLanes.js'
// const router = Router() // const router = Router()
// router.post('/getLaneData', async (req,res)=>{ // router.post('/getLaneData', async (req,res)=>{
// console.log(`Getting lane data`)
// let filterdOCMELane // let filterdOCMELane
// // the body will need to have 1 lane in it. lane: 'B050' // // the body will need to have 1 lane in it. lane: 'B050'
@@ -12,30 +14,58 @@
// // add the lane in so we dont crash // // add the lane in so we dont crash
// if(req.body.lane){ // if(req.body.lane){
// if(data.laneType === "laneID"){ // if(data.laneType === "laneID"){
// get the mapped lane id // // get the mapped lane id
// where alpla_laneID = 30286 // // where alpla_laneID = 30286
// filterdOCMELane = ocmeInventory.replaceAll("where alpla_laneDescription = '[lane]'", `where alpla_laneID = ${data.lane}`) // console.log("getting lane by id")
// filterdOCMELane = ocmeInventory.replaceAll("where alpla_laneDescription = '[lane]'", `where alpla_laneID = ${data.lane}`)
// } else { // } else {
// filterdOCMELane = ocmeInventory.replaceAll('[lane]', data.lane) // console.log("getting lane by name")
// } // filterdOCMELane = ocmeInventory.replaceAll('[lane]', data.lane)
// }
// //
// // get lanes // // get lanes
// const laneData = await runQuery(filterdOCMELane, 'Getting current ocme lanes linked') // try {
// res.status(200).json({success: true,message: `All current lanes from the ocme system.`, totalpallets: laneData.length,data: laneData}) // const laneData = await runQuery(filterdOCMELane, 'Getting current ocme lanes linked')
// return
// let inventory = laneData
// // if we do not have anything then we will send just the name and id so we can at least do the count.
// if(laneData.length === 0){
// console.log(`Sending the name and id over. for an empty count`)
// const lanes = await runQuery(getLanes, 'Getting current ocme lanes linked')
// let filterLane
// if(data.laneType === "laneID"){
// filterLane = lanes.filter(n => n.alpla_laneID.includes(req.body.lane))
// }else {
// filterLane = lanes.filter(n => n.alpla_laneDescription === req.body.lane.toUpperCase())
// }
// inventory = [{alpla_laneDescription: filterLane[0].alpla_laneDescription,alpla_laneID: filterLane[0].alpla_laneID }]
// console.log(filterLane, inventory)
// }
// return res.status(200).json({success: true,message: `All current lanes from the ocme system.`, totalpallets: inventory.length,data: inventory})
// } catch (error) {
// console.log(error)
// return res.status(400).json({success: false,message: `Error getting the lane data`, error})
// }
// } // }
// // if the plant sends the id over lets change the entire where statement. // // if the plant sends the id over lets change the entire where statement.
// if(req.body.laneID){ // // if(req.body.laneID){
// console.log(`Getting the lane data by id`) // // console.log(`Getting the lane data by id`)
// filterdOCMELane = ocmeInventory.replaceAll(`alpla_laneDescription = '[lane]'`, `alpla_laneID = ${data.laneID}`) // // filterdOCMELane = ocmeInventory.replaceAll(`alpla_laneDescription = '[lane]'`, `alpla_laneID = ${data.laneID}`)
// // get lanes // // // get lanes
// const laneData = await runQuery(filterdOCMELane, 'Getting current ocme lanes linked') // // const laneData = await runQuery(filterdOCMELane, 'Getting current ocme lanes linked')
// res.status(200).json({success: true,message: `All current lanes from the ocme system.`, totalpallets: laneData.length,data: laneData}) // // res.status(200).json({success: true,message: `All current lanes from the ocme system.`, totalpallets: laneData.length,data: laneData})
// return // // return
// } // // }
// // if everything is missing we send false // // if everything is missing we send false
// res.status(200).json({success: false,message: `Missing either lane name or lane id.`}) // res.status(200).json({success: false,message: `Missing either lane name or lane id.`})

View File

@@ -1,6 +1,4 @@
import { eq } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js"; import { db } from "../../../../../database/dbclient.js";
import { printers } from "../../../../../database/schema/printers.js";
import { settings } from "../../../../../database/schema/settings.js"; import { settings } from "../../../../../database/schema/settings.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js"; import { createLog } from "../../../logger/logger.js";

View File

@@ -80,11 +80,12 @@ export const labelerTagRead = async (tagData: any) => {
} else { } else {
currentPalletCheck = 0; currentPalletCheck = 0;
createLog( createLog(
"error", "warn",
"dyco", "dyco",
"ocp", "ocp",
`You have reached 20 pallets since the last check please validate the labeler is still in sync.` `You have reached 20 pallets since the last check please validate the labeler is still in sync.`
); );
return;
} }
} }

View File

@@ -4,21 +4,27 @@ import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../../../logger/logger.js"; import { createLog } from "../../../../../logger/logger.js";
// strapper related issues // strapper related issues
export let strapperActive = true; export let strapperActive = false;
export let stapperFaulted = false; export let stapperFaulted = false;
export let strapperFaultCount = 3; // move to db so we can control it outside the app export let strapperFaultCount = 3; // move to db so we can control it outside the app
let alreadyAlerted = false;
export const strapperFaults = async (tagData: any) => { export const strapperFaults = async (tagData: any) => {
const { data, error } = await tryCatch(db.select().from(settings)); const { data, error } = await tryCatch(db.select().from(settings));
// failed to get settings
if (error) { if (error) {
return { success: false, message: "Failed to get settings." }; return { success: false, message: "Failed to get settings." };
} }
const strapperCheckSetting = data.filter((n) => n.name === "strapperCheck"); const strapperCheckSetting = data.filter((n) => n.name === "strapperCheck");
// strapper error is off
if (strapperCheckSetting[0]?.value === "0") {
return;
}
// strapper error is on
if (strapperCheckSetting[0]?.value === "1") { if (strapperCheckSetting[0]?.value === "1") {
// faulted and still has a check or 2 to go
if (stapperFaulted && strapperFaultCount > 0) { if (stapperFaulted && strapperFaultCount > 0) {
createLog( createLog(
"warn", "warn",
@@ -26,13 +32,13 @@ export const strapperFaults = async (tagData: any) => {
"ocp", "ocp",
`There was a strapper error, remaining pallets to check ${strapperFaultCount}.` `There was a strapper error, remaining pallets to check ${strapperFaultCount}.`
); );
alreadyAlerted = true;
strapperFaultCount = strapperFaultCount - 1; strapperFaultCount = strapperFaultCount - 1;
return { return {
success: false, success: true,
message: `There was a strapper error, remaining pallets to check ${strapperFaultCount}.`, message: `There was a strapper error, remaining pallets to check ${strapperFaultCount}.`,
}; };
} else { } else {
// no more checks needed clearing everything
createLog( createLog(
"debug", "debug",
"dyco", "dyco",
@@ -45,30 +51,36 @@ export const strapperFaults = async (tagData: any) => {
strapperFaultCount = 3; // move to db as well strapperFaultCount = 3; // move to db as well
return { return {
success: true, success: false,
message: `Strapper check is active but not faulted, remaining pallets to check ${strapperFaultCount}.`, message: `Strapper check is active but not faulted, remaining pallets to check ${strapperFaultCount}.`,
}; };
} }
} }
if (strapperActive) { // strapper was triggered turning on the counter.
// monitor strapper //console.log(`Strapper fault is ${strapperError.value}`);
//console.log(`Strapper fault is ${strapperError.value}`); if (tagData.value && strapperFaultCount > 0) {
if (tagData.value && strapperFaultCount > 0) { // strapper faulted we want to start the trigger to force the check
// strapper faulted we want to start the trigger to force the check if (!stapperFaulted) {
if (!stapperFaulted) { createLog(
createLog( "error",
"error", "dyco",
"dyco", "ocp",
"ocp", `Strapper errored triggering, manual checks will be required for the next ${strapperFaultCount}`
`Strapper errored triggering manual checks will be required for the next ${strapperFaultCount}` );
);
}
stapperFaulted = true; stapperFaulted = true;
alreadyAlerted = true;
// change move fault count to db.... // change move fault count to db....
strapperFaultCount = 3; strapperFaultCount = 3;
return {
success: true,
message: `Strapper errored triggering, manual checks will be required for the next ${strapperFaultCount}`,
};
} }
} }
return {
success: false,
message: `Some how we made it here and just going to say we are good. :)`,
};
}; };

View File

@@ -1,6 +1,3 @@
import { db } from "../../../../../../database/dbclient.js";
import { settings } from "../../../../../../database/schema/settings.js";
import { tryCatch } from "../../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../../logger/logger.js"; import { createLog } from "../../../../logger/logger.js";
import { query } from "../../../../sqlServer/prodSqlServer.js"; import { query } from "../../../../sqlServer/prodSqlServer.js";
import { machineCheck } from "../../../../sqlServer/querys/ocp/machineId.js"; import { machineCheck } from "../../../../sqlServer/querys/ocp/machineId.js";

View File

@@ -153,7 +153,7 @@
"contactPhone": "6366970253", "contactPhone": "6366970253",
"customerTiAcc": "ALPL01MCDINT", "customerTiAcc": "ALPL01MCDINT",
"lstServerPort": "4000", "lstServerPort": "4000",
"active": false, "active": true,
"serverLoc": "E:\\LST\\lstv2", "serverLoc": "E:\\LST\\lstv2",
"oldVersion": "E:\\LST\\lst_backend", "oldVersion": "E:\\LST\\lst_backend",
"shippingHours": "[{\"early\": \"06:30\", \"late\": \"23:00\"}]", "shippingHours": "[{\"early\": \"06:30\", \"late\": \"23:00\"}]",

View File

@@ -175,6 +175,22 @@ const newSettings = [
roles: "admin", roles: "admin",
module: "ocp", module: "ocp",
}, },
{
name: "v1SysServer",
value: `localhost`,
description:
"The remaining v1 stuff here until we finish the frontend here.",
serviceBelowsTo: "system",
roleToChange: "admin",
},
{
name: "v1SysPort",
value: `4000`,
description:
"The remaining v1 stuff here until we finish the frontend here.",
serviceBelowsTo: "system",
roleToChange: "admin",
},
]; ];
export const areSettingsIn = async () => { export const areSettingsIn = async () => {
// get the roles // get the roles
@@ -192,7 +208,7 @@ export const areSettingsIn = async () => {
"info", "info",
"lst", "lst",
"server", "server",
"Settingss were just added due to missing them on server startup" "Settings were just added due to missing them on server startup"
); );
} catch (error) { } catch (error) {
createLog( createLog(

View File

@@ -27,7 +27,7 @@ export const setPerms = async () => {
`${data[0].value} will not have its permissions updated as it is not the test server.` `${data[0].value} will not have its permissions updated as it is not the test server.`
); );
} }
const scriptPath = `E:\\LST\\lstv2\\server\\scripts\\update.ps1 `; const scriptPath = `E:\\LST\\lstv2\\dist\\server\\scripts\\updatePermissions.ps1 `;
const args = [ const args = [
"-NoProfile", "-NoProfile",

View File

@@ -0,0 +1,140 @@
/**
 * T-SQL query (SQL Server, `AlplaPROD_test1` database — NOTE(review): the DB
 * name is hard-coded throughout; confirm this should not be parameterized
 * before promoting beyond the test environment).
 *
 * Returns every active article (`V_Artikel.aktiv = 1`) together with:
 *  - `TypeOfMaterial`: a CASE mapping of `ArtikelvariantenTypBez` into coarse
 *    buckets (AD, MB, PKG, MM, Waste, Bottle, Preform, Purchased_preform,
 *    Caps, 'Not used', or 'Item not defined' as the fallback). Some compared
 *    values ('Palet', 'Purchased_preform') look like typos but presumably
 *    match typo'd data in the source system — verify before "fixing" them.
 *  - `salesPrice`/`CustomerArticleNumber`/`CustomerArticleDescription`: taken
 *    from the most recent active `T_HistoryVK` row per article
 *    (ROW_NUMBER() partitioned by IdArtikelvarianten, newest GueltigabDatum,
 *    restricted to the default customer via StandardKunde = 1); missing
 *    prices are coalesced to 0 via the CASE on `sales.price`.
 *  - `FG`: flags articles whose production area is a finished-goods profit
 *    center, else 'not Defined Profit Center'.
 *  - Cost-center id/description joined from `V_FibuKonten_BASIS`.
 *
 * The string is executed as-is (no placeholders). Uses (nolock) hints, so
 * reads are dirty — acceptable for reporting, per the rest of this module's
 * query style.
 */
export const activeArticle = `
SELECT V_Artikel.IdArtikelvarianten,
V_Artikel.Bezeichnung,
V_Artikel.ArtikelvariantenTypBez,
V_Artikel.PreisEinheitBez,
case when sales.price is null then 0 else sales.price end as salesPrice,
TypeOfMaterial=CASE
WHEN
V_Artikel.ArtikelvariantenTypBez LIKE'%Additive'
Then 'AD'
when V_Artikel.ArtikelvariantenTypBez Like '%Masterbatch'
THEN 'MB'
WHEN V_Artikel.ArtikelvariantenTypBez ='Pallet' or
V_Artikel.ArtikelvariantenTypBez ='Top' or
V_Artikel.ArtikelvariantenTypBez ='Bags' or
V_Artikel.ArtikelvariantenTypBez ='Bag' or
V_Artikel.ArtikelvariantenTypBez ='Stretch Wrap' or
V_Artikel.ArtikelvariantenTypBez ='Stretch Film' or
V_Artikel.ArtikelvariantenTypBez ='Banding Materials' or
V_Artikel.ArtikelvariantenTypBez ='Carton' or
V_Artikel.ArtikelvariantenTypBez ='Re-Shipper Box' or
V_Artikel.ArtikelvariantenTypBez ='Label' or
V_Artikel.ArtikelvariantenTypBez ='Pallet Label' or
V_Artikel.ArtikelvariantenTypBez ='Carton Label' or
V_Artikel.ArtikelvariantenTypBez ='Liner' or
V_Artikel.ArtikelvariantenTypBez ='Dose Cup' or
V_Artikel.ArtikelvariantenTypBez ='Metal Cage' or
V_Artikel.ArtikelvariantenTypBez ='Spout' or
V_Artikel.ArtikelvariantenTypBez = 'Slip Sheet' or
V_Artikel.ArtikelvariantenTypBez = 'Palet' or
V_Artikel.ArtikelvariantenTypBez = 'LID' or
V_Artikel.ArtikelvariantenTypBez= 'Metal' or
V_Artikel.ArtikelvariantenTypBez= 'Corner post' or
V_Artikel.ArtikelvariantenTypBez= 'Bottle Label' or
V_Artikel.ArtikelvariantenTypBez = 'Paper label' or
V_Artikel.ArtikelvariantenTypBez = 'Banding' or
V_Artikel.ArtikelvariantenTypBez = 'Glue' or
V_Artikel.ArtikelvariantenTypBez = 'Top Frame' or
V_Artikel.ArtikelvariantenTypBez = 'IML Label' or
V_Artikel.ArtikelvariantenTypBez = 'Purch EBM Bottle' or
V_Artikel.ArtikelvariantenTypBez = 'Purch Spout'
THEN 'PKG'
WHEN V_Artikel.ArtikelvariantenTypBez='HD-PE' or
V_Artikel.ArtikelvariantenTypBez='HD-PE PCR' or
V_Artikel.ArtikelvariantenTypBez='HD-PP' or
V_Artikel.ArtikelvariantenTypBez= 'PP' or
V_Artikel.ArtikelvariantenTypBez LIKE '%PCR' or
V_Artikel.ArtikelvariantenTypBez= 'LDPE' or
V_Artikel.ArtikelvariantenTypBez= 'PP' or
V_Artikel.ArtikelvariantenTypBez= 'HDPE' or
V_Artikel.ArtikelvariantenTypBez= 'PET' or
V_Artikel.ArtikelvariantenTypBez= 'PET-P'
THEN 'MM'
WHEN
V_Artikel.ArtikelvariantenTypBez='HDPE-Waste' or
V_Artikel.ArtikelvariantenTypBez='$Waste Container' or
V_Artikel.ArtikelvariantenTypBez='Mixed-Waste' or
V_Artikel.ArtikelvariantenTypBez LIKE'%-Waste%'
THEN 'Waste'
WHEN
V_Artikel.ArtikelvariantenTypBez = 'Bottle' or
V_Artikel.ArtikelvariantenTypBez = 'SBM Bottle' or
V_Artikel.ArtikelvariantenTypBez = 'EBM Bottle' or
V_Artikel.ArtikelvariantenTypBez = 'ISBM Bottle' or
V_Artikel.ArtikelvariantenTypBez = 'Decorated Bottle'
THEN 'Bottle'
WHEN V_Artikel.ArtikelvariantenTypBez = 'Preform'
Then 'Preform'
When
V_Artikel.ArtikelvariantenTypBez = 'Purchased Preform' or
V_Artikel.ArtikelvariantenTypBez = 'Purchased Caps' or
V_Artikel.ArtikelvariantenTypBez = 'Purchased_preform'
THEN 'Purchased_preform'
When
V_Artikel.ArtikelvariantenTypBez = 'Closures'
THEN 'Caps'
When
V_Artikel.ArtikelvariantenTypBez = 'Dummy'
THEN 'Not used'
ELSE 'Item not defined' END
,V_Artikel.IdArtikelvariantenTyp,
Round(V_Artikel.ArtikelGewicht, 3) as Article_Weight,
IdAdresse,
AdressBez,
AdressTypBez,
ProdBereichBez,
FG=case when
V_Artikel.ProdBereichBez = 'SBM' or
V_Artikel.ProdBereichBez = 'IM-Caps' or
V_Artikel.ProdBereichBez = 'IM-PET' or
V_Artikel.ProdBereichBez = 'PRINT OFFICE' or
V_Artikel.ProdBereichBez = 'EBM' or
V_Artikel.ProdBereichBez = 'ISBM'
Then 'FG'
Else 'not Defined Profit Center'
end,
V_Artikel.Umlaeufe as num_of_cycles,
V_FibuKonten_BASIS.FibuKontoNr as CostsCenterId,
V_FibuKonten_BASIS.Bezeichnung as CostCenterDescription,
sales.[KdArtNr] as CustomerArticleNumber,
sales.[KdArtBez] as CustomerArticleDescription,
round(V_Artikel.Zyklus, 2) as CycleTime,
Sypronummer as salesAgreement
--,*
FROM AlplaPROD_test1.dbo.V_Artikel (nolock)
join
AlplaPROD_test1.dbo.V_Artikelvarianten on AlplaPROD_test1.dbo.V_Artikel.IdArtikelvarianten =
AlplaPROD_test1.dbo.V_Artikelvarianten.IdArtikelvarianten
join
AlplaPROD_test1.dbo.V_FibuKonten_BASIS on AlplaPROD_test1.dbo.V_Artikelvarianten.IdFibuKonto =
AlplaPROD_test1.dbo.V_FibuKonten_BASIS.IdFibuKonto
-- adding in the sales price
left join
(select * from
(select
ROW_NUMBER() OVER (PARTITION BY IdArtikelvarianten ORDER BY GueltigabDatum DESC) AS RN,
IdArtikelvarianten as av
,GueltigabDatum as validDate
,VKPreis as price
,[KdArtNr]
,[KdArtBez]
--,*
from AlplaPROD_test1.dbo.T_HistoryVK (nolock)
where
--GueltigabDatum > getDate() - 120
--and
Aktiv = 1
and StandardKunde = 1 -- default address
) a
where RN = 1) as sales
on AlplaPROD_test1.dbo.V_Artikel.IdArtikelvarianten = sales.av
where V_Artikel.aktiv = 1
order by V_Artikel.IdArtikelvarianten /*, TypeOfMaterial */
`;

View File

@@ -0,0 +1,25 @@
/**
 * T-SQL query (SQL Server, `AlplaPROD_test1`): open order releases
 * (`AbrufStatus = 1`) whose delivery date falls inside a configurable
 * window around now, excluding EXW delivery terms.
 *
 * Returns one row per release: `header` (order number), `releaseNumber`,
 * and `delDate`.
 *
 * The string contains placeholders the caller must substitute before
 * executing (they are NOT valid SQL as-is):
 *  - `[from]` / `[to]`: window size in hours around GETDATE()
 *    (default of 24h is read from the db, per the inline comment).
 *  - `[exclude]`: comma-separated release ids to skip.
 *
 * Exported as `const`: the module never reassigns it, and callers build
 * the final SQL with string substitution, which does not mutate this
 * binding. (Was `export let`, which needlessly exposed a mutable export.)
 */
export const getHeaders = `
select AuftragsNummer as header,
IdAuftragsAbruf as releaseNumber,
AbrufLiefertermin as delDate
FROM alplaprod_test1.dbo.V_TrackerAuftragsAbrufe (nolock) b
left join
(
select IdAdressen addressID,
x.Bezeichnung as name,
c.Bezeichnung as deliveryCondition,
c.Kurzbezeichnung as Abbreviation
from AlplaPROD_test1.dbo.t_Adressen (nolock) x
left join
AlplaPROD_test1.[dbo].[T_Lieferkonditionen] (nolock) c
on x.IdLieferkondition = c.IdLieferkondition
) x
on b.IdAdresse = x.addressID
WHERE AbrufStatus = 1 and
AbrufLiefertermin between DATEADD(HOUR, -[from], GETDATE()) and DATEADD(HOUR, [to], GETDATE()) -- this number will be grabbed from the db with a default of 24hours
and x.Abbreviation not in ('exw')
and IdAuftragsAbruf not in ([exclude])
`;

View File

@@ -0,0 +1,149 @@
/**
 * T-SQL query (SQL Server, `AlplaPROD_test1`): full detail for a single
 * order release to be transmitted — article, customer address, carrier
 * transport costs, and packaging dimensions/weight.
 *
 * Joins, per release:
 *  - article basis data + latest `T_HistoryVK` row (ROW_NUMBER, newest
 *    gueltigabDatum) for the packaging-spec id;
 *  - active standard transport costs (`T_HistoryTransportkosten`,
 *    Standard = 1 and Aktiv = 1) for the delivery address;
 *  - address + delivery-condition details (`T_Adressen` /
 *    `T_Lieferkonditionen`);
 *  - packaging spec (`V_Vpk_BASIS`) — `pkgWeight` combines article weight
 *    per pallet with the packaging's own gross weight (kg → t via /1000).
 * Filters to open releases (`AbrufStatus = 1`) in the delivery window and
 * excludes EXW terms.
 *
 * Placeholders the caller must substitute before executing (the string is
 * NOT valid SQL as-is):
 *  - `[from]` / `[to]`: window in hours around getdate() (default 24h,
 *    read from the db per the inline comment);
 *  - `[releaseToProcess]`: the release id to fetch.
 *
 * NOTE(review): result-set aliases `artileType`, `pkgLengh` and the
 * `deliveryContion*` family are misspelled, but downstream consumers read
 * rows by these names — left as-is to preserve the contract.
 *
 * Exported as `const`: the module never reassigns it, and callers build
 * the final SQL via string substitution, which does not mutate this
 * binding. (Was `export let`, which needlessly exposed a mutable export.)
 */
export const getOrderToSend = `
select * from (
Select IdAdresse as addressId,
LieferAdressBez as addressAlias,
LEFT(ArtikelVariantenAlias, charindex(' ', ArtikelVariantenAlias) - 1) item,
IdArtikelVarianten as article,
ArtikelVariantenAlias as articleAlias,
IdAuftragsAbruf as releaseNumber,
AuftragsNummer AS Header,
AuftragsNummer as CustomerLineItemNo,
AbrufNummer AS CustomerReleaseNumber,
AbrufMengeVPK AS Pallets,
AbrufMenge AS QTY,
IdAdresse AS CUSTOMERID,
AbrufLadeDatum AS LoadingDate,
AbrufLiefertermin AS DeliveryDate
,carrierAV
,singleTrip
,roundTrip
,countryAv
,zipCode
,streetAddress
,city -- split on here by ,
--,OrderStatus = 'loading'
,ac.costCenter -- also called pfc
,pkg.pkgHeight
,pkg.pkgLengh
,pkg.pkgWidth
,ROUND((ac.weight * pkg.palletCount / 1000) + pkg.totalPKGWeight,2)as pkgWeight
,AbrufStatus as status
,remark
,ac.artileType
--,*
FROM alplaprod_test1.dbo.V_TrackerAuftragsAbrufe (nolock) x
--av info
left join
(SELECT [IdArtikelvarianten] as article
,[FibuKontenKontoNr] as costCenter
,ArtikelGewicht as weight,
s.pkgId
,artikelvariantentypbez as artileType
FROM [AlplaPROD_test1].[dbo].[V_Artikelvarianten_BASIS] (nolock) x
left join
(
select * from (select
ROW_NUMBER() OVER(PARTITION BY [IdArtikelvarianten] ORDER BY gueltigabDatum DESC) AS rn
,[IdArtikelvarianten] as article
,IdVpkVorschrift as pkgId
from [AlplaPROD_test1].[dbo].[T_HistoryVK] (nolock)) a where rn = 1
) as s
on
x.[IdArtikelvarianten] = s.article
) as ac
on
x.IdArtikelVarianten = ac.article
-- transport part of query
left join
(SELECT [IdHistoryTransportkosten]
,[IdLieferadresse] as customerAddressAV
,[IdSpediteuradresse] as carrierAV
,[GueltigabDatum] as validDate
,[Einzelfahrtkosten] as singleTrip
,[Rundfahrtkosten] as roundTrip
,[EinzelfahrtkostenProKubikmeter] as singletripCostsperCubicMeter
,[RundfahrtkostenProKubikmeter] as roundSingletripCostsperCubicMeter
,[Standard] as standard
,[Aktiv] as active
--,[FWEinzelfahrtkosten]
--,[FWRundfahrtkosten]
--,[FWEinzelfahrtkostenProKubikmeter]
--,[FWRundfahrtkostenProKubikmeter]
FROM [AlplaPROD_test1].[dbo].[T_HistoryTransportkosten] (nolock)
where Standard = 1 and Aktiv = 1) as carrier
on x.IdAdresse = carrier.customerAddressAV
-- address stuff
left join
(SELECT [IdAdressen] as addressAV
,[IdAdressentyp] as addressType -- 1 customer,2 supplier, 4 transport
--,[IdZahlKond]
--,[IdMwst]
,[Bezeichnung] as addressName
,[IdStaaten] as countryAv
,[PLZ] as zipCode
,[Strasse] as streetAddress
,[PLZPostfach] as poBox
,[Postfach] as mailbox
,[Ort] as city
,[Tel] as customerPhone
,[DebNr] as debitorNr
,xy.[Bonus] as bonus
,[Bemerkung] as remark
,[Aktiv] as active
,Entfernung as distanceKM
,Transportzeit as transportTime
,IdLieferkondition as deliveryCondtionAV
,delc.deliveryContionAlias
,delc.deliveryContionAbv
--,ac.costCenter
FROM [AlplaPROD_test1].[dbo].[T_Adressen] (nolock) xy
--delivery condtion details
left join
(SELECT [IdLieferkondition] as deliveryCondtionAV
,[Bezeichnung] as deliveryContionAlias
,[Kurzbezeichnung] as deliveryContionAbv
,[Bemerkung] as deliveryContionRemark
,[Aktiv] as active
FROM [AlplaPROD_test1].[dbo].[T_Lieferkonditionen] (nolock)) as delC
on xy.IdLieferkondition = delC.deliveryCondtionAV
) as del
on
x.IdAdresse = del.addressAV
-- pkg info
left join
(
SELECT [IdVpkVorschrift] as pkgId
,[Aktiv] as active
,[Bezeichnung] as alias
,[AnzahlAVProVpk] as palletCount
,[AnzahlVpkProLKW] as totalTruck
,[AnzahlKistenProKarton]
,[BruttoGewicht] / 1000 as totalPKGWeight
--,[AnzahlAVProHE]
,[VpkDimensionenHoehe] as pkgHeight
,[VpkDimensionenBreite] as pkgWidth
,[VpkDimensionenTiefe] as pkgLengh
FROM [AlplaPROD_test1].[dbo].[V_Vpk_BASIS]
)as pkg
on
ac.pkgId = pkg.pkgId
WHERE AbrufStatus = 1
and AbrufLiefertermin between DATEADD(HOUR, -[from], getdate()) and DATEADD(HOUR, [to], getdate())-- this number will be grabbed from the db with a default of 24hours
and deliveryContionAbv not in ('EXW')
--ORDER BY AbrufLiefertermin)
) a
where releaseNumber = [releaseToProcess]
`;

View File

@@ -15,6 +15,6 @@
"esModuleInterop": true, "esModuleInterop": true,
"resolveJsonModule": true "resolveJsonModule": true
}, },
"include": ["server", "scripts/**/*.ts"], "include": ["server", "scripts/**/*.ts", "testFiles/test-tiPostOrders.ts"],
"exclude": ["node_modules", "frontend", "dist", "testFiles"] "exclude": ["node_modules", "frontend", "dist", "testFiles"]
} }