Compare commits

..

18 Commits

Author SHA1 Message Date
2ff7b9baf9 refactor(migrations): not needed but we have it and needed to correct the settings 2025-08-10 18:15:07 -05:00
8145dc800d refactor(siloadjustment): refactored to get the settings from the state vs direct from db 2025-08-10 18:14:38 -05:00
6ccf500e5e feat(prodrole): added in planner role 2025-08-10 18:14:08 -05:00
103171c924 refactor(notifications): refactored the cron job system so we can utilize outside the service 2025-08-10 18:13:32 -05:00
2eb6fa7794 fix(gotransport): error handling so we dont get spammed with errors 2025-08-10 18:12:52 -05:00
397f1da595 fix(inv query): error in improper placed , in the query 2025-08-10 18:12:29 -05:00
8d63f7f6b0 feat(psi): psi querys added and av grab right now 2025-08-10 18:11:57 -05:00
52345bc94c feat(eom): added in hostorical data and deletion for data over 45 days 2025-08-10 18:11:16 -05:00
a8a1c1d7fb ci(release): bump build number to 548 2025-08-09 15:05:59 -05:00
83ff2641f3 ci(release): bump build number to 547 2025-08-09 15:02:21 -05:00
7c48f608bc ci(release): bump build number to 546 2025-08-09 14:58:02 -05:00
1802b9ba4e ci(release): bump build number to 545 2025-08-09 14:41:23 -05:00
67a12ccc5c ci(release): bump build number to 544 2025-08-09 14:28:17 -05:00
15e2a65cbb ci(release): bump build number to 543 2025-08-07 21:18:09 -05:00
9e5577e6bb feat(dm): changes to have a default time if nothing is passed in the excel 2025-08-06 15:25:45 -05:00
c52e2a8671 ci(release): bump build number to 542 2025-08-06 15:19:54 -05:00
8f76d6998c ci(release): bump build number to 541 2025-08-06 15:08:13 -05:00
e209686d3c ci(release): bump build number to 540 2025-08-05 14:50:05 -05:00
26 changed files with 5019 additions and 53 deletions

View File

@@ -0,0 +1,10 @@
-- Creates the labelRatio table used to track automatic vs manual label counts.
CREATE TABLE "labelRatio" (
-- FIX: the original identifier was written as " ratio_id" (leading space
-- inside the quotes), which would create a column literally named
-- " ratio_id" and break every ORM/schema reference to "ratio_id".
"ratio_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"name" text DEFAULT 'labels',
"autoLabel" integer DEFAULT 0,
"manualLabel" integer DEFAULT 0,
"lastReset" timestamp DEFAULT now()
);
--> statement-breakpoint
ALTER TABLE "invHistoricalData" ADD COLUMN "lot_number" text;--> statement-breakpoint
CREATE UNIQUE INDEX "labelname" ON "labelRatio" USING btree ("name");

View File

@@ -0,0 +1,2 @@
-- Adds defaults to the invHistoricalData audit columns so inserts that omit
-- them are stamped with the service user ('lst') and the insertion time.
ALTER TABLE "invHistoricalData" ALTER COLUMN "upd_user" SET DEFAULT 'lst';--> statement-breakpoint
ALTER TABLE "invHistoricalData" ALTER COLUMN "upd_date" SET DEFAULT now();

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -491,6 +491,20 @@
"when": 1752195894698, "when": 1752195894698,
"tag": "0069_chemical_maximus", "tag": "0069_chemical_maximus",
"breakpoints": true "breakpoints": true
},
{
"idx": 70,
"version": "7",
"when": 1754767718941,
"tag": "0070_brief_mephisto",
"breakpoints": true
},
{
"idx": 71,
"version": "7",
"when": 1754768521841,
"tag": "0071_fantastic_old_lace",
"breakpoints": true
} }
] ]
} }

View File

@@ -12,7 +12,7 @@ export const invHistoricalData = pgTable(
"invHistoricalData", "invHistoricalData",
{ {
inv_id: uuid("inv_id").defaultRandom().primaryKey(), inv_id: uuid("inv_id").defaultRandom().primaryKey(),
histDate: date("histDate").notNull(), // what month are we running in should just be the first of current month histDate: date("histDate").notNull(), // this date should always be yesterday when we post it.
plantToken: text("plantToken"), plantToken: text("plantToken"),
article: text("article").notNull(), article: text("article").notNull(),
articleDescription: text("articleDescription").notNull(), articleDescription: text("articleDescription").notNull(),
@@ -21,10 +21,11 @@ export const invHistoricalData = pgTable(
avaliable_QTY: text("avaliable_QTY"), avaliable_QTY: text("avaliable_QTY"),
coa_QTY: text("coa_QTY"), coa_QTY: text("coa_QTY"),
held_QTY: text("held_QTY"), held_QTY: text("held_QTY"),
lot_Number: text("lot_number"),
consignment: text("consignment"), consignment: text("consignment"),
location: text("location"), location: text("location"),
upd_user: text("upd_user"), upd_user: text("upd_user").default("lst"),
upd_date: timestamp("upd_date"), upd_date: timestamp("upd_date").defaultNow(),
} }
// (table) => [ // (table) => [
// // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`), // // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),

View File

@@ -36,7 +36,7 @@
} }
}, },
"admConfig": { "admConfig": {
"build": 539, "build": 548,
"oldBuild": "backend-0.1.3.zip" "oldBuild": "backend-0.1.3.zip"
}, },
"devDependencies": { "devDependencies": {

View File

@@ -0,0 +1,47 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { articleInfo } from "../../sqlServer/querys/psiReport/articleData.js";

/**
 * Fetches PSI article master data for a comma-separated list of AVs.
 *
 * @param avs - AV list substituted into the [articles] placeholder of the
 *              articleInfo SQL. Falsy input short-circuits with success:false.
 * @returns { success, message, data } — data is the article row array on
 *          success, the raw error on failure, [] when no AVs were given.
 */
export const getGetPSIArticleData = async (avs: string) => {
  if (!avs) {
    return {
      success: false,
      message: `Missing av's please send at least one over`,
      data: [],
    };
  }
  const { data, error } = (await tryCatch(
    query(articleInfo.replace("[articles]", avs), "PSI article info")
  )) as any;
  if (error) {
    createLog(
      "error",
      "datamart",
      "datamart",
      `There was an error getting the article info: ${JSON.stringify(
        error
      )}`
    );
    return {
      success: false,
      // FIX: key was misspelled "messsage", so API consumers reading
      // `message` on the error path always got undefined.
      message: `There was an error getting the article info`,
      data: error,
    };
  }
  // FIX: guard the nested access — `data.data` threw when the query wrapper
  // resolved without a payload.
  return {
    success: true,
    message: "PSI Article Data",
    data: data?.data ?? [],
  };
};

View File

@@ -9,6 +9,7 @@ import fakeEDI from "./route/fakeEDI.js";
import addressCorrections from "./route/getCityStateData.js"; import addressCorrections from "./route/getCityStateData.js";
import fifoIndex from "./route/getFifoIndex.js"; import fifoIndex from "./route/getFifoIndex.js";
import financeAudit from "./route/getFinanceAudit.js"; import financeAudit from "./route/getFinanceAudit.js";
import psiArticleData from "./route/getPsiArticleData.js";
const app = new OpenAPIHono(); const app = new OpenAPIHono();
@@ -23,6 +24,7 @@ const routes = [
addressCorrections, addressCorrections,
fifoIndex, fifoIndex,
financeAudit, financeAudit,
psiArticleData,
] as const; ] as const;
const appRoutes = routes.forEach((route) => { const appRoutes = routes.forEach((route) => {

View File

@@ -0,0 +1,61 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { getDeliveryByDateRange } from "../controller/getDeliveryByDateRange.js";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { getGetPSIArticleData } from "../controller/psiGetArticleData.js";

const app = new OpenAPIHono({ strict: false });

// GET /psiarticledata?avs=<list> — returns PSI article master data.
app.openapi(
  createRoute({
    tags: ["dataMart"],
    summary: "Returns the psiarticleData.",
    method: "get",
    path: "/psiarticledata",
    request: {
      // FIX: the original declared a JSON request body on a GET route, which
      // is invalid for GET and was never read by the handler (its schema only
      // held an unused, misspelled "includeRunnningNumbers" field). The
      // handler actually reads the `avs` query parameter, so document that.
      query: z.object({
        avs: z.string().openapi({ example: "12345" }),
      }),
    },
    responses: responses(),
  }),
  async (c) => {
    // Raw multi-value query map, e.g. { avs: ["12345,67890"] }.
    const queryParams: any = c.req.queries();
    // Record the hit for API usage stats.
    apiHit(c, { endpoint: "/psiarticledata" });
    // FIX: `queryParams["avs"][0]` threw a TypeError whenever `avs` was
    // missing — the queries() object itself is always truthy, so the old
    // `articles ? ... : null` guard never fired.
    const avs = queryParams?.["avs"]?.[0] ?? null;
    const { data, error } = await tryCatch(getGetPSIArticleData(avs));
    if (error) {
      console.log(error);
      return c.json(
        {
          success: false,
          message: "There was an error getting the articles.",
          data: error,
        },
        400
      );
    }
    return c.json(
      {
        success: data.success,
        message: data.message,
        data: data.data,
      },
      data.success ? 200 : 400
    );
  }
);

export default app;

View File

@@ -1,15 +0,0 @@
// import {prisma} from "database";
// import {createLog} from "logging";
// export const deleteHistory = async (date: string) => {
// // delete the inventory if it equals this date
// try {
// const remove = await prisma.$executeRaw`
// DELETE FROM historyInventory
// WHERE histDate < ${date}
// `;
// createLog("general/eom", "info", `${remove} were just remove from the historical inventory for date: ${date}`);
// } catch (error) {
// createLog("general/eom", "error", `Removing historical inventory error: ${error}`);
// }
// };

View File

@@ -4,10 +4,19 @@ const app = new OpenAPIHono();
import stats from "./route/stats.js"; import stats from "./route/stats.js";
import history from "./route/invHistory.js"; import history from "./route/invHistory.js";
import { createJob } from "../notifications/utils/processNotifications.js";
import { historicalInvIMmport } from "./utils/historicalInv.js";
const routes = [stats, history] as const; const routes = [stats, history] as const;
const appRoutes = routes.forEach((route) => { const appRoutes = routes.forEach((route) => {
app.route("/eom", route); app.route("/eom", route);
}); });
// setTimeout(() => {
// historicalInvIMmport();
// }, 5 * 1000);
// the time we want to run the hostircal data should be the same time the historical data run on the server
// getting this from the shift time
createJob("eom_historical_inv", "0 7 * * *", historicalInvIMmport);
export default app; export default app;

View File

@@ -0,0 +1,101 @@
import { sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { totalInvNoRn } from "../../sqlServer/querys/dataMart/totalINV.js";
import { format } from "date-fns-tz";
import { serverSettings } from "../../server/controller/settings/getSettings.js";
import { deleteHistory } from "./removeHistorical.js";
/**
 * Nightly job: snapshots yesterday's total inventory from the SQL Server
 * datamart into the Postgres invHistoricalData table (skipping the import if
 * yesterday's rows already exist), then prunes rows older than 45 days via
 * deleteHistory().
 */
export const historicalInvIMmport = async () => {
  const plantToken = serverSettings.filter((n) => n.name === "plantToken");
  const { data, error } = (await tryCatch(
    db.select().from(invHistoricalData)
  )) as any;
  if (error) {
    createLog(
      "error",
      "eom",
      "eom",
      `There was an error getting the historical data`
    );
    // FIX: abort here — the original fell through with `data` undefined and
    // crashed below on `dateCheck.length`.
    return;
  }
  // check if we have data already for today this way we dont duplicate anything.
  const today = new Date();
  today.setDate(today.getDate() - 1);
  const dateCheck = data?.filter(
    (i: any) => i.histDate === format(today, "yyyy-MM-dd")
  );
  if (dateCheck.length === 0) {
    // get the historical data from the sql
    const { data: inv, error: invError } = (await tryCatch(
      query(totalInvNoRn, "eom historical data")
    )) as any;
    if (invError) {
      createLog(
        "error",
        "eom",
        "eom",
        `There was an error getting the sql data`
      );
      return;
    }
    if (inv.data.length === 0) {
      createLog("error", "eom", "eom", inv.message);
      return;
    }
    const importInv = inv.data ? inv.data : [];
    // Map the SQL Server result columns onto the Postgres schema; histDate is
    // always stamped as "yesterday" at the database level.
    const eomImportData = importInv.map((i: any) => {
      return {
        histDate: sql`(NOW() - INTERVAL '1 day')::date`,
        // FIX: optional-chain so a missing plantToken setting can't crash
        // the whole import run.
        plantToken: plantToken[0]?.value,
        article: i.av,
        articleDescription: i.Alias,
        materialType: "",
        total_QTY: i.Total_PalletQTY,
        avaliable_QTY: i.Avaliable_PalletQTY,
        coa_QTY: i.COA_QTY,
        held_QTY: i.Held_QTY,
        consignment: i.Consigment,
        lot_Number: i.lot,
      };
    });
    const { data: dataImport, error: errorImport } = await tryCatch(
      db.insert(invHistoricalData).values(eomImportData)
    );
    if (errorImport) {
      createLog(
        "error",
        "eom",
        "eom",
        `There was an error importing all the inventory data.`
      );
      return;
    }
    if (dataImport) {
      // FIX: no early return here — the original returned after a successful
      // import, so deleteHistory() below never ran on a normal daily run and
      // the >45-day purge never happened.
      createLog(
        "info",
        "eom",
        "eom",
        `All data was imported succefully.`
      );
    }
  } else {
    createLog("info", "eom", "eom", `Yesterdays Data already in..`);
  }
  // do the check to delete old data
  // FIX: await so failures/ordering are deterministic within the cron run.
  await deleteHistory();
};

View File

@@ -0,0 +1,51 @@
// import {prisma} from "database";
// import {createLog} from "logging";
import { lte, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
// export const deleteHistory = async (date: string) => {
// // delete the inventory if it equals this date
// try {
// const remove = await prisma.$executeRaw`
// DELETE FROM historyInventory
// WHERE histDate < ${date}
// `;
// createLog("general/eom", "info", `${remove} were just remove from the historical inventory for date: ${date}`);
// } catch (error) {
// createLog("general/eom", "error", `Removing historical inventory error: ${error}`);
// }
// };
/**
 * Purges historical inventory rows whose histDate is 45 days old or older.
 * Logs the outcome either way; never throws.
 */
export const deleteHistory = async () => {
  // Cutoff computed database-side: today minus 45 days, as a date.
  const cutoff = sql`(NOW() - INTERVAL '45 day')::date`;
  const removal = db
    .delete(invHistoricalData)
    .where(lte(invHistoricalData.histDate, cutoff));
  const { error } = await tryCatch(removal);
  if (error) {
    createLog(
      "error",
      "eom",
      "eom",
      "There was an error deleting the historical data."
    );
    return;
  }
  createLog(
    "info",
    "eom",
    "eom",
    "Data older than 45 days has been deleted."
  );
};

View File

@@ -2,6 +2,7 @@ process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
import axios from "axios"; import axios from "axios";
import { pino } from "pino"; import { pino } from "pino";
import build from "pino-abstract-transport"; import build from "pino-abstract-transport";
import { tryCatch } from "../../globalUtils/tryCatch.js";
const pinoLogLevels: any = { const pinoLogLevels: any = {
10: "trace", 10: "trace",
@@ -26,12 +27,19 @@ export default async function buildGoTransport() {
// service: obj?.service.toLowerCase(), // service: obj?.service.toLowerCase(),
// message: obj.msg, // message: obj.msg,
// }); // });
const { data, error } = (await tryCatch(
axios.post(`${process.env.LST_BASE_URL}/api/v1/log`, {
service: obj?.service.toLowerCase(),
level: levelName,
message: obj.msg,
})
)) as any;
axios.post(`${process.env.LST_BASE_URL}/api/v1/log`, { if (error) {
service: obj?.service.toLowerCase(), console.log(
level: levelName, "The go server must be offline so we cant post the new logs."
message: obj.msg, );
}); }
// console.log(`Go log level: ${levelName}`); // console.log(`Go log level: ${levelName}`);
} }

View File

@@ -9,6 +9,10 @@ import { sendEmail } from "../../../notifications/controller/sendMail.js";
import { settings } from "../../../../../database/schema/settings.js"; import { settings } from "../../../../../database/schema/settings.js";
import { generateOneTimeKey } from "../../../../globalUtils/singleUseKey.js"; import { generateOneTimeKey } from "../../../../globalUtils/singleUseKey.js";
import { eq } from "drizzle-orm"; import { eq } from "drizzle-orm";
import {
getSettings,
serverSettings,
} from "../../../server/controller/settings/getSettings.js";
export const createSiloAdjustment = async ( export const createSiloAdjustment = async (
data: any | null, data: any | null,
@@ -18,18 +22,21 @@ export const createSiloAdjustment = async (
* Creates a silo adjustment based off warehouse, location, and qty. * Creates a silo adjustment based off warehouse, location, and qty.
* qty will come from the hmi, prolink, or silo patrol * qty will come from the hmi, prolink, or silo patrol
*/ */
const { data: set, error: setError } = await tryCatch( // const { data: set, error: setError } = await tryCatch(
db.select().from(settings) // db.select().from(settings)
); // );
if (setError) { // const { data: set, error: setError } = await tryCatch(getSettings());
return {
success: false,
message: `There was an error getting setting data to post to the server.`,
data: setError,
};
}
// if (setError) {
// return {
// success: false,
// message: `There was an error getting setting data to post to the server.`,
// data: setError,
// };
// }
const set = serverSettings.length === 0 ? [] : serverSettings;
// getting stock data first so we have it prior to the adjustment // getting stock data first so we have it prior to the adjustment
const { data: s, error: stockError } = await tryCatch( const { data: s, error: stockError } = await tryCatch(
query(siloQuery, "Silo data Query") query(siloQuery, "Silo data Query")

View File

@@ -12,18 +12,25 @@ import { delay } from "../../../../globalUtils/delay.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { settings } from "../../../../../database/schema/settings.js"; import { settings } from "../../../../../database/schema/settings.js";
import { eq } from "drizzle-orm"; import { eq } from "drizzle-orm";
import {
getSettings,
serverSettings,
} from "../../../server/controller/settings/getSettings.js";
export const migrateAdjustments = async () => { export const migrateAdjustments = async () => {
/** /**
* Migrates the silo adjustments from v1 to v2 * Migrates the silo adjustments from v1 to v2
*/ */
const { data, error } = await tryCatch(db.select().from(settings)); //const { data, error } = await tryCatch(db.select().from(settings));
// const { data, error } = await tryCatch(getSettings());
if (error) { // if (error) {
createLog("error", "silo", "logistics", "Getting settings."); // createLog("error", "silo", "logistics", "Getting settings.");
return; // return;
} // }
const data = serverSettings.length === 0 ? [] : serverSettings;
const migrationCompleted = data?.filter( const migrationCompleted = data?.filter(
(n) => n.name === "siloAdjMigrations" (n) => n.name === "siloAdjMigrations"
@@ -44,7 +51,7 @@ export const migrateAdjustments = async () => {
} }
const { data: s, error: siloError } = await tryCatch( const { data: s, error: siloError } = await tryCatch(
axios.get( axios.get(
`http://${server[0].value}:${port[0].value}/api/v1/warehouse/getSilosAdjustment?startDate=1/1/2020&endDate=4/1/2026` `http://${server[0]?.value}:${port[0]?.value}/api/v1/warehouse/getSilosAdjustment?startDate=1/1/2020&endDate=4/1/2026`
) )
); );

View File

@@ -7,6 +7,10 @@ export const excelDateStuff = (serial: number, time: any = 0) => {
// get the local timezone // get the local timezone
const localoffset = new Date().getTimezoneOffset() / 60; // then divide by 60 to get the true number; const localoffset = new Date().getTimezoneOffset() / 60; // then divide by 60 to get the true number;
if (serial % 1 === 0) {
time = 800;
}
const addHours = serial + localoffset / 24; const addHours = serial + localoffset / 24;
//console.log(getJsDateFromExcel(addHours)); //console.log(getJsDateFromExcel(addHours));
if (typeof serial !== "number" || serial <= 0) { if (typeof serial !== "number" || serial <= 0) {
@@ -23,6 +27,8 @@ export const excelDateStuff = (serial: number, time: any = 0) => {
date.setMinutes(minutes); date.setMinutes(minutes);
} }
//console.log(date.toLocaleString("en-US"), getJsDateFromExcel(addHours)); //console.log(date.toLocaleString("en-US"), getJsDateFromExcel(addHours));
//console.log(serial);
//console.log(date.toISOString()); //console.log(date.toISOString());
return date.toISOString(); //.toLocaleString("en-US"); // or .toISOString() if preferred return date.toISOString(); //.toLocaleString("en-US"); // or .toISOString() if preferred
}; };

View File

@@ -105,7 +105,7 @@ export const startNotificationMonitor = async () => {
}, 5 * 1000); }, 5 * 1000);
}; };
const createJob = async ( export const createJob = async (
id: string, id: string,
schedule: string, schedule: string,
task: () => Promise<void> task: () => Promise<void>

View File

@@ -50,6 +50,18 @@ const newProdRoles: any = [
}, },
// logistics // logistics
{
name: "planner",
description: "Planning role.",
roles: [
"Administration\\Scan\\ApiConsumer",
"Administration\\Printing\\ApiConsumer",
"Logistics\\Warehousing\\ProcessAdmin",
"Manufacturing\\IssueMaterial\\ProcessAdmin",
"Manufacturing\\ProductionLabelling\\ProcessAdmin",
],
rolesLegacy: [55, 95, 15, 105, 145, 9],
},
// plant manager // plant manager
{ {
name: "plantManager", name: "plantManager",

View File

@@ -1,21 +1,22 @@
// this query pulls all the inventory except the inv locations. // this query pulls all the inventory except the inv locations.
export const totalInvNoRn = ` export const totalInvNoRn = `
select x.idartikelVarianten as av, select
ArtikelVariantenAlias as Alias, x.idartikelVarianten as av,
x.ArtikelVariantenAlias as Alias
--x.Lfdnr as RunningNumber, --x.Lfdnr as RunningNumber,
round(sum(EinlagerungsMengeVPKSum),0) as Total_Pallets, ,round(sum(EinlagerungsMengeVPKSum),0) as Total_Pallets
sum(EinlagerungsMengeSum) as Total_PalletQTY, ,sum(EinlagerungsMengeSum) as Total_PalletQTY
round(sum(VerfuegbareMengeVPKSum),0) as Avalible_Pallets, ,round(sum(VerfuegbareMengeVPKSum),0) as Avalible_Pallets
sum(VerfuegbareMengeSum) as Avaliable_PalletQTY, ,sum(VerfuegbareMengeSum) as Avaliable_PalletQTY
sum(case when c.Description LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as COA_Pallets, ,sum(case when c.Description LIKE '%COA%' then GesperrteMengeVPKSum else 0 end) as COA_Pallets
sum(case when c.Description LIKE '%COA%' then GesperrteMengeSum else 0 end) as COA_QTY, ,sum(case when c.Description LIKE '%COA%' then GesperrteMengeSum else 0 end) as COA_QTY
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeVPKSum else 0 end) as Held_Pallets, ,sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeVPKSum else 0 end) as Held_Pallets
sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeSum else 0 end) as Held_QTY ,sum(case when c.Description NOT LIKE '%COA%' or x.IdMainDefect = -1 then GesperrteMengeSum else 0 end) as Held_QTY
,sum(case when x.WarenLagerLagerTyp = 8 then VerfuegbareMengeSum else 0 end) as Consigment ,sum(case when x.WarenLagerLagerTyp = 8 then VerfuegbareMengeSum else 0 end) as Consigment
,IdProdPlanung as Lot ,IdProdPlanung as Lot
--,IdAdressen, ----,IdAdressen,
x.AdressBez ,x.AdressBez
--,* --,*
from [AlplaPROD_test1].dbo.[V_LagerPositionenBarcodes] (nolock) x from [AlplaPROD_test1].dbo.[V_LagerPositionenBarcodes] (nolock) x

View File

@@ -0,0 +1,40 @@
// SQL for PSI article master data: article joined to its default-customer
// sales price (latest by ValidAfter), packaging instruction, and stock limits.
// The literal token [articles] is replaced via string substitution with a
// caller-supplied AV list before execution (see getGetPSIArticleData).
// NOTE(review): plain string substitution of [articles] is open to SQL
// injection if the AV list ever originates from untrusted input — TODO
// confirm callers sanitize it, or move to a parameterized query.
export const articleInfo = `
use [test1_AlplaPROD2.0_Read]
select a.Id,
a.HumanReadableId as av,
a.Alias as alias,
p.LoadingUnitsPerTruck as loadingUnitsPerTruck,
p.LoadingUnitsPerTruck * p.LoadingUnitPieces as qtyPerTruck,
p.LoadingUnitPieces,
case when i.MinQuantity IS NOT NULL then round(cast(i.MinQuantity as float), 2) else 0 end as min,
case when i.MaxQuantity IS NOT NULL then round(cast(i.MaxQuantity as float),2) else 0 end as max
from masterData.Article (nolock) as a
/* sales price */
left join
(select *
from (select
id,
PackagingId,
ArticleId,
DefaultCustomer,
ROW_NUMBER() OVER (PARTITION BY ArticleId ORDER BY ValidAfter DESC) AS RowNum
from masterData.SalesPrice (nolock)
where DefaultCustomer = 1) as x
where RowNum = 1
) as s
on a.id = s.ArticleId
/* pkg instuctions */
left join
masterData.PackagingInstruction (nolock) as p
on s.PackagingId = p.id
/* stock limits */
left join
masterData.StockLimit (nolock) as i
on a.id = i.ArticleId
where a.active = 1 and a.HumanReadableId in ([articles])
`;

View File

@@ -0,0 +1,19 @@
// SQL for delivered quantities per article (legacy AlplaPROD_test1 source).
// Deliveries booked between midnight and 07:00 are attributed to the previous
// shipping day (shift-day convention). [startDate], [endDate] and [articles]
// are replaced via string substitution before execution.
// NOTE(review): string substitution of these tokens is open to SQL injection
// if any value comes from untrusted input — TODO confirm callers sanitize.
export const psiDeliveredData = `
use AlplaPROD_test1
declare @start_date nvarchar(30) = [startDate] --'2025-01-01'
declare @end_date nvarchar(30) = [endDate] --'2025-08-09'
select IdArtikelVarianten,
ArtikelVariantenBez,
sum(Menge) totalDelivered,
case when convert(time, upd_date) between '00:00' and '07:00' then convert(date, upd_date - 1) else convert(date, upd_date) end as ShippingDate
from dbo.V_LadePlanungenLadeAuftragAbruf (nolock)
where upd_date between CONVERT(datetime, @start_date + ' 7:00') and CONVERT(datetime, @end_date + ' 7:00') and IdArtikelVarianten in ([articles])
group by IdArtikelVarianten, upd_date,
ArtikelVariantenBez
`;

View File

@@ -0,0 +1,27 @@
// SQL for delivered quantities per article (AlplaPROD 2.0 read replica).
// Same shift-day convention as the legacy query: deliveries between midnight
// and 07:00 count toward the previous ShippingDate. [startDate], [endDate]
// and [articles] are replaced via string substitution before execution.
// NOTE(review): string substitution of these tokens is open to SQL injection
// if any value comes from untrusted input — TODO confirm callers sanitize.
export const psiDeliveredData = `
use [test1_AlplaPROD2.0_Read]
declare @start_date nvarchar(30) = [startDate] --'2025-01-01'
declare @end_date nvarchar(30) = [endDate] --'2025-08-09'
select
ArticleHumanReadableId,
ArticleAlias,
cast(Quantity as int) Quantity,
--cast(DeliveryDate as time) as deliveryTime,
--cast(DeliveryDate as date) as originalDeliveryDate,
case when cast(DeliveryDate as time) between '00:00' and '07:00'
then DATEADD(DAY, -1, CONVERT(DATE, DeliveryDate))
else cast(DeliveryDate as date)
end as ShippingDate
--,*
from [order].[Release]
where case when cast(DeliveryDate as time) between '00:00' and '07:00'
then DATEADD(DAY, -1, CONVERT(DATE, DeliveryDate))
else cast(DeliveryDate as date)
end between @start_date and @end_date
and ArticleHumanReadableId in ([articles])
order by DeliveryDate`;

View File

@@ -0,0 +1,33 @@
// SQL for planned production lots per article and production day, joined to
// the planning master (V_ProdPlanung) for lot/machine detail. [startDate],
// [endDate] and [articles] are replaced via string substitution before
// execution.
// NOTE(review): string substitution of these tokens is open to SQL injection
// if any value comes from untrusted input — TODO confirm callers sanitize.
export const planningNumbersByAVDate = `
use AlplaPROD_test1
declare @start_date nvarchar(30) = [startDate] --'2025-01-01'
declare @end_date nvarchar(30) = [endDate] --'2025-08-09'
/*
articles will need to be passed over as well as the date structure we want to see
*/
-- planned lots in planning
select V_ProdLosProduktionJeProdTag_PLANNING.IdArtikelvarianten As Article,
ProduktionAlias as Description,
standort as MachineId,
MaschinenBezeichnung as MachineName,
--MaschZyklus as PlanningCycleTime,
V_ProdLosProduktionJeProdTag_PLANNING.IdProdPlanung as LotNumber,
FORMAT(ProdTag, 'MM/dd/yyyy') as ProductionDay,
V_ProdLosProduktionJeProdTag_PLANNING.planMenge as TotalPlanned,
ProduktionMenge as QTYPerDay,
round(ProduktionMengeVPK, 2) PalDay,
Status as finished
--MaschStdAuslastung as nee
from dbo.V_ProdLosProduktionJeProdTag_PLANNING (nolock)
left join
dbo.V_ProdPlanung (nolock) on
V_ProdLosProduktionJeProdTag_PLANNING .IdProdPlanung = V_ProdPlanung.IdProdPlanung
where V_ProdLosProduktionJeProdTag_PLANNING.IdArtikelvarianten in ([articles]) and ProdTag between @start_date and @end_date --and IdProdPlanung = 18442
order by ProdTag
`;

View File

@@ -0,0 +1,21 @@
// SQL for produced quantities per machine/article/day from the production
// controlling reporting DB, excluding units already booked out. [startDate],
// [endDate] and [articles] are replaced via string substitution before
// execution.
// NOTE(review): column alias "palletsProdued" is misspelled in the source
// data contract; left as-is because downstream consumers may read that key.
// NOTE(review): string substitution of these tokens is open to SQL injection
// if any value comes from untrusted input — TODO confirm callers sanitize.
export const productionNumbers = `
use [test1_AlplaPROD2.0_Reporting]
declare @startDate nvarchar(30) = [startDate] --'2024-12-30'
declare @endDate nvarchar(30) = [endDate] --'2025-08-09'
select MachineLocation,
ArticleHumanReadableId as article,
sum(Quantity) as Produced,
count(Quantity) as palletsProdued,
FORMAT(convert(date, ProductionDay), 'M/d/yyyy') as ProductionDay,
ProductionLotHumanReadableId as productionLot
from [reporting_productionControlling].[ScannedUnit] (nolock)
where convert(date, ProductionDay) between @startDate and @endDate and ArticleHumanReadableId in ([articles]) and BookedOut is null
group by MachineLocation, ArticleHumanReadableId,ProductionDay, ProductionLotHumanReadableId
order by ProductionDay
`;
`;