Compare commits

...

5 Commits

26 changed files with 2795 additions and 193 deletions

View File

@@ -54,7 +54,8 @@
"alplaprod", "alplaprod",
"intiallally", "intiallally",
"ppoo", "ppoo",
"prodlabels" "prodlabels",
"rfid"
], ],
"gitea.token": "8456def90e1c651a761a8711763d6ef225d6b2db", "gitea.token": "8456def90e1c651a761a8711763d6ef225d6b2db",
"gitea.instanceURL": "https://git.tuffraid.net", "gitea.instanceURL": "https://git.tuffraid.net",

View File

@@ -0,0 +1,20 @@
meta {
name: PSI - Forecast data
type: http
seq: 1
}
get {
url: {{url}}/lst/old/api/datamart/psiforecastdata?customer=8
body: none
auth: inherit
}
params:query {
customer: 8
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,23 @@
meta {
name: Consume
type: http
seq: 1
}
post {
url: {{url}}/lst/old/api/logistics/consume
body: json
auth: inherit
}
body:json {
{
"lotNum":283559,
"runningNr":19302907
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: Consume
seq: 5
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,15 @@
meta {
name: SSCC
type: http
seq: 2
}
get {
url:
body: none
auth: inherit
}
settings {
encodeUrl: true
}

View File

@@ -1,5 +1,5 @@
vars { vars {
url: https://usday1prod.alpla.net url: https://uslim1prod.alpla.net
session_cookie: session_cookie:
urlv2: http://usbow1vms006:3000 urlv2: http://usbow1vms006:3000
jwtV2: jwtV2:

View File

@@ -0,0 +1,8 @@
meta {
name: v3endpoints
seq: 5
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,16 @@
meta {
name: tester
type: http
seq: 1
}
post {
url: http://localhost:3000/lst/api/system/prodsql/start
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,2 @@
-- Drizzle migration 0078: add warehouse columns to "invHistoricalData".
-- Defaults mirror the schema definition (historicalINV.ts): whse_id -> ''
-- and whse_name -> 'missing whseName' so pre-existing rows stay queryable
-- without a backfill. NOTE(review): generated by drizzle-kit — confirm any
-- hand edits survive regeneration.
ALTER TABLE "invHistoricalData" ADD COLUMN "whse_id" text DEFAULT '';--> statement-breakpoint
ALTER TABLE "invHistoricalData" ADD COLUMN "whse_name" text DEFAULT 'missing whseName';

File diff suppressed because it is too large Load Diff

View File

@@ -547,6 +547,13 @@
"when": 1763407463567, "when": 1763407463567,
"tag": "0077_lucky_texas_twister", "tag": "0077_lucky_texas_twister",
"breakpoints": true "breakpoints": true
},
{
"idx": 78,
"version": "7",
"when": 1766514890344,
"tag": "0078_cheerful_the_leader",
"breakpoints": true
} }
] ]
} }

View File

@@ -24,9 +24,11 @@ export const invHistoricalData = pgTable(
lot_Number: text("lot_number"), lot_Number: text("lot_number"),
consignment: text("consignment"), consignment: text("consignment"),
location: text("location"), location: text("location"),
whseId: text("whse_id").default(""),
whseName: text("whse_name").default("missing whseName"),
upd_user: text("upd_user").default("lst"), upd_user: text("upd_user").default("lst"),
upd_date: timestamp("upd_date").defaultNow(), upd_date: timestamp("upd_date").defaultNow(),
} },
// (table) => [ // (table) => [
// // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`), // // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
// uniqueIndex("role_name").on(table.name), // uniqueIndex("role_name").on(table.name),

View File

@@ -0,0 +1,42 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { forecastData } from "../../sqlServer/querys/psiReport/forecast.js";
// type ArticleData = {
// id: string
// }
/**
 * Fetches PSI forecast rows from the datamart SQL Server.
 *
 * @param customer - customer id (DeliveryAddressHumanReadableId) used to
 *   filter the forecast; substituted into the `[customer]` placeholder of
 *   `forecastData`. The value is spliced directly into the SQL text (the
 *   query helper takes no bind parameters here), so it is rejected unless
 *   it is a plain integer — this closes a SQL-injection hole.
 * @returns `{ success, message, data }` — on failure `data` carries the
 *   underlying error object.
 */
export const getGetPSIForecastData = async (customer: string) => {
  let queryData = forecastData;
  if (customer) {
    // Validate before string substitution: only a bare integer id is
    // allowed into the SQL text.
    if (!/^\d+$/.test(customer.trim())) {
      return {
        success: false,
        message: "Invalid customer id for forecast lookup.",
        data: null,
      };
    }
    queryData = forecastData.replace("[customer]", customer.trim());
  }
  // If no customer was supplied the literal `[customer]` placeholder is
  // left in the SQL and the server rejects it — that surfaces below as the
  // error branch, matching the original behavior.
  const { data, error } = (await tryCatch(
    query(queryData, "PSI forecast info"),
  )) as any;
  if (error) {
    createLog(
      "error",
      "datamart",
      "datamart",
      `There was an error getting the forecast info: ${JSON.stringify(error)}`,
    );
    return {
      success: false,
      // Fixed: this key was misspelled `messsage`, so route handlers that
      // read `.message` (as the success branch provides) saw undefined.
      message: `There was an error getting the forecast info`,
      data: error,
    };
  }
  return {
    success: true,
    message: "PSI forecast Data",
    data: data.data,
  };
};

View File

@@ -1,4 +1,4 @@
import { and, between, inArray, sql } from "drizzle-orm"; import { and, between, inArray, notInArray, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js"; import { db } from "../../../../database/dbclient.js";
import { invHistoricalData } from "../../../../database/schema/historicalINV.js"; import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../globalUtils/tryCatch.js";
@@ -10,7 +10,9 @@ import { createLog } from "../../logger/logger.js";
export const psiGetInventory = async ( export const psiGetInventory = async (
avs: string, avs: string,
startDate: string, startDate: string,
endDate: string endDate: string,
whseToInclude: string,
exludeLanes: string
) => { ) => {
let articles: any = []; let articles: any = [];
@@ -23,28 +25,49 @@ export const psiGetInventory = async (
} }
const ids = avs.split(",").map((id) => id.trim()); const ids = avs.split(",").map((id) => id.trim());
const whse = whseToInclude
? whseToInclude
.split(",")
.map((w) => w.trim())
.filter(Boolean)
: [];
const { data, error } = (await tryCatch( const locations = exludeLanes
db ? exludeLanes.split(",").map((l) => l.trim()).filter(Boolean)
: [];
const conditions = [
inArray(invHistoricalData.article, ids),
between(invHistoricalData.histDate, startDate, endDate),
];
// only add the warehouse condition if there are any whse values
if (whse.length > 0) {
console.log("adding whse to include in");
conditions.push(inArray(invHistoricalData.whseId, whse));
}
// locations we dont want in the system
if (locations.length > 0) {
console.log("adding excluded lanes in ",locations);
conditions.push(notInArray(invHistoricalData.location, locations));
}
const query = db
.select() .select()
.from(invHistoricalData) .from(invHistoricalData)
.where( .where(and(...conditions));
and(
inArray(invHistoricalData.article, ids), // optional tryCatch or await as you had
between(invHistoricalData.histDate, startDate, endDate) const { data, error } = (await tryCatch(query)) as any;
)
)
//.limit(100)
)) as any;
if (error) { if (error) {
createLog( createLog(
"error", "error",
"datamart", "datamart",
"datamart", "datamart",
`There was an error getting the planning info: ${JSON.stringify( `There was an error getting the planning info: ${JSON.stringify(error)}`,
error
)}`
); );
return { return {
success: false, success: false,

View File

@@ -13,6 +13,7 @@ import getInhouseDeliveryByDate from "./route/getInHouseDeliveryDateByRange.js";
import currentInv from "./route/getInventory.js"; import currentInv from "./route/getInventory.js";
import getOpenOrders from "./route/getOpenOrders.js"; import getOpenOrders from "./route/getOpenOrders.js";
import psiArticleData from "./route/getPsiArticleData.js"; import psiArticleData from "./route/getPsiArticleData.js";
import psiForecastData from "./route/getPsiForecast.js";
import psiInventory from "./route/getPsiinventory.js"; import psiInventory from "./route/getPsiinventory.js";
import psiPlanningData from "./route/getPsiPlanningData.js"; import psiPlanningData from "./route/getPsiPlanningData.js";
import psiProductionData from "./route/getPsiProductionData.js"; import psiProductionData from "./route/getPsiProductionData.js";
@@ -37,6 +38,7 @@ const routes = [
psiPlanningData, psiPlanningData,
psiProductionData, psiProductionData,
psiInventory, psiInventory,
psiForecastData,
] as const; ] as const;
const appRoutes = routes.forEach((route) => { const appRoutes = routes.forEach((route) => {

View File

@@ -0,0 +1,65 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { getGetPSIForecastData } from "../controller/psiForecastData.js";
const app = new OpenAPIHono({ strict: false });

// The endpoint's real input is the `customer` query-string parameter.
// The original route declared a JSON request *body* on a GET — GET bodies
// are ignored by clients/proxies and are invalid in the OpenAPI document —
// and the declared field (`includeRunnningNumbers`) was never read.
const Query = z.object({
  customer: z.string().optional().openapi({ example: "8" }),
});

app.openapi(
  createRoute({
    tags: ["dataMart"],
    summary: "Returns the psiforecastdata.",
    method: "get",
    path: "/psiforecastdata",
    request: {
      query: Query,
    },
    responses: responses(),
  }),
  async (c) => {
    // make sure we have a valid user being accessed thats really logged in
    apiHit(c, { endpoint: "/psiforecastdata" });
    // `c.req.queries()` always returns an object (truthy), so the old
    // `if (customer)` guard never helped and `customer["customer"][0]`
    // threw a TypeError whenever the param was absent. Optional chaining
    // degrades to null instead.
    const q: any = c.req.queries();
    const customerArticle = q["customer"]?.[0] ?? null;
    const { data, error } = await tryCatch(
      getGetPSIForecastData(customerArticle),
    );
    if (error) {
      console.log(error);
      return c.json(
        {
          success: false,
          message: "There was an error getting the articles.",
          data: error,
        },
        400,
      );
    }
    return c.json(
      {
        success: data.success,
        message: data.message,
        data: data.data,
      },
      data.success ? 200 : 400,
    );
  },
);
export default app;

View File

@@ -1,7 +1,7 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi"; import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js"; import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js"; import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { psiGetInventory } from "../controller/psiGetInventory.js"; import { psiGetInventory } from "../controller/psiGetInventory.js";
const app = new OpenAPIHono({ strict: false }); const app = new OpenAPIHono({ strict: false });
@@ -33,8 +33,10 @@ app.openapi(
psiGetInventory( psiGetInventory(
q["avs"] ? q["avs"][0] : null, q["avs"] ? q["avs"][0] : null,
q["startDate"] ? q["startDate"][0] : null, q["startDate"] ? q["startDate"][0] : null,
q["endDate"] ? q["endDate"][0] : null q["endDate"] ? q["endDate"][0] : null,
) q["whseToInclude"] ? q["whseToInclude"][0] : null,
q["exludeLanes"] ? q["exludeLanes"][0] : null,
),
); );
if (error) { if (error) {
@@ -45,7 +47,7 @@ app.openapi(
message: "There was an error getting the production.", message: "There was an error getting the production.",
data: error, data: error,
}, },
400 400,
); );
} }
@@ -57,8 +59,8 @@ app.openapi(
message: data.message, message: data.message,
data: data.data, data: data.data,
}, },
data.success ? 200 : 400 data.success ? 200 : 400,
); );
} },
); );
export default app; export default app;

View File

@@ -2,21 +2,21 @@ import { OpenAPIHono } from "@hono/zod-openapi";
const app = new OpenAPIHono(); const app = new OpenAPIHono();
import stats from "./route/stats.js";
import history from "./route/invHistory.js";
import { createJob } from "../notifications/utils/processNotifications.js";
import { historicalInvIMmport } from "./utils/historicalInv.js";
import { tryCatch } from "../../globalUtils/tryCatch.js"; import { tryCatch } from "../../globalUtils/tryCatch.js";
import { createLog } from "../logger/logger.js";
import { createJob } from "../notifications/utils/processNotifications.js";
import { query } from "../sqlServer/prodSqlServer.js"; import { query } from "../sqlServer/prodSqlServer.js";
import { shiftChange } from "../sqlServer/querys/misc/shiftChange.js"; import { shiftChange } from "../sqlServer/querys/misc/shiftChange.js";
import { createLog } from "../logger/logger.js"; import gpData from "./route/getGpData.js";
import lastPurch from "./route/getLastPurchPrice.js"; import lastPurch from "./route/getLastPurchPrice.js";
import lastSales from "./route/getLastSalesPrice.js"; import lastSales from "./route/getLastSalesPrice.js";
import gpData from "./route/getGpData.js";
import consumptionData from "./route/getProductionConsumption.js"; import consumptionData from "./route/getProductionConsumption.js";
import purchased from "./route/getPurchased.js";
import regrind from "./route/getregrind.js"; import regrind from "./route/getregrind.js";
import soldItems from "./route/getSoldItems.js"; import soldItems from "./route/getSoldItems.js";
import purchased from "./route/getPurchased.js"; import history from "./route/invHistory.js";
import stats from "./route/stats.js";
import { historicalInvIMmport } from "./utils/historicalInv.js";
const routes = [ const routes = [
stats, stats,
@@ -36,7 +36,7 @@ const appRoutes = routes.forEach((route) => {
setTimeout(async () => { setTimeout(async () => {
const { data: shift, error: shiftError } = (await tryCatch( const { data: shift, error: shiftError } = (await tryCatch(
query(shiftChange, "shift change from material.") query(shiftChange, "shift change from material."),
)) as any; )) as any;
if (shiftError) { if (shiftError) {
@@ -44,7 +44,7 @@ setTimeout(async () => {
"error", "error",
"eom", "eom",
"eom", "eom",
"There was an error getting the shift times will use fallback times" "There was an error getting the shift times will use fallback times",
); );
} }
@@ -63,4 +63,10 @@ setTimeout(async () => {
// the time we want to run the hostircal data should be the same time the historical data run on the server // the time we want to run the hostircal data should be the same time the historical data run on the server
// getting this from the shift time // getting this from the shift time
if (process.env.NODE_ENV?.trim() !== "production") {
setTimeout(() => {
historicalInvIMmport();
}, 15 * 1000);
}
export default app; export default app;

View File

@@ -76,7 +76,10 @@ export const historicalInvIMmport = async () => {
coa_QTY: i.COA_QTY, coa_QTY: i.COA_QTY,
held_QTY: i.Held_QTY, held_QTY: i.Held_QTY,
consignment: i.Consigment, consignment: i.Consigment,
lot_Number: i.lot, lot_Number: i.Lot,
location: i.location,
whseId: i.warehouseID,
whseName: i.warehouseName,
}; };
}); });

View File

@@ -1,4 +1,5 @@
import { addDays, addHours, isAfter, parse } from "date-fns"; import { addDays, addHours, isAfter, parse } from "date-fns";
import { format } from "date-fns-tz";
import XLSX from "xlsx"; import XLSX from "xlsx";
import { db } from "../../../../../../../database/dbclient.js"; import { db } from "../../../../../../../database/dbclient.js";
import { settings } from "../../../../../../../database/schema/settings.js"; import { settings } from "../../../../../../../database/schema/settings.js";
@@ -92,7 +93,7 @@ export const abbottOrders = async (data: any, user: any) => {
orders: [], orders: [],
}; };
const oOrders: any = openOrders; const oOrders: any = openOrders;
//console.log(orderData);
let correctedOrders: any = orderData let correctedOrders: any = orderData
.filter( .filter(
(o: any) => (o: any) =>
@@ -147,6 +148,7 @@ export const abbottOrders = async (data: any, user: any) => {
// Map Excel data to predefinedObject format // Map Excel data to predefinedObject format
const orders = filterOrders.map((o: any) => { const orders = filterOrders.map((o: any) => {
//console.log(o.po, " ", o.date, format(o.date, "M/d/yyyy HH:mm"));
return { return {
customerId: customerID, customerId: customerID,
invoiceAddressId: invoiceID, invoiceAddressId: invoiceID,
@@ -157,7 +159,7 @@ export const abbottOrders = async (data: any, user: any) => {
deliveryAddressId: 8, deliveryAddressId: 8,
customerArticleNo: o.customerArticlenumber, customerArticleNo: o.customerArticlenumber,
quantity: o.qty, quantity: o.qty,
deliveryDate: addHours(addDays(o.date, 1), 1), // adding this in so we can over come the constant 1 day behind thing as a work around deliveryDate: addHours(format(o.date, "M/d/yyyy HH:mm"), 1), //addHours(addDays(o.date, 1), 1), // adding this in so we can over come the constant 1 day behind thing as a work around
customerLineItemNo: 1, // this is how it is currently sent over from abbott customerLineItemNo: 1, // this is how it is currently sent over from abbott
customerReleaseNo: 1, // same as above customerReleaseNo: 1, // same as above
}, },
@@ -165,6 +167,7 @@ export const abbottOrders = async (data: any, user: any) => {
}; };
}); });
//console.log(orders);
// combine it all together. // combine it all together.
const updatedPredefinedObject = { const updatedPredefinedObject = {
...predefinedObject, ...predefinedObject,

View File

@@ -1,34 +1,60 @@
import { getJsDateFromExcel } from "excel-date-to-js"; import { getJsDateFromExcel } from "excel-date-to-js";
export const excelDateStuff = (serial: number, time: any = 0) => { // export const excelDateStuff = (serial: number, time?: any) => {
// console.log(serial); // // add 5 hours or the offset to utc
// add 5 hours or the offset to utc
// get the local timezone // // get the local timezone
const localoffset = new Date().getTimezoneOffset() / 60; // then divide by 60 to get the true number; // const localoffset = new Date().getTimezoneOffset() / 60; // then divide by 60 to get the true number;
if (serial % 1 === 0) { // if (!time) {
time = 800; // time = 800;
} // }
const addHours = serial + localoffset / 24; // const addHours = serial + localoffset / 24;
//console.log(getJsDateFromExcel(addHours)); // //console.log(getJsDateFromExcel(addHours));
// if (typeof serial !== "number" || serial <= 0) {
// return "invalid Date";
// }
// const date = getJsDateFromExcel(addHours); // base date from Excel serial
// if (time != 0) {
// // convert the time over to hour and min
// const hours = Math.floor(time / 100);
// const minutes = time % 100;
// date.setHours(hours);
// date.setMinutes(minutes);
// }
// //console.log(date.toLocaleString("en-US"), getJsDateFromExcel(addHours));
// //console.log(serial);
// console.log(date.toISOString(), serial, time);
// return date.toISOString(); //.toLocaleString("en-US"); // or .toISOString() if preferred
// };
export const excelDateStuff = (serial: number, time?: any) => {
if (typeof serial !== "number" || serial <= 0) { if (typeof serial !== "number" || serial <= 0) {
return "invalid Date"; return "invalid Date";
} }
const date = getJsDateFromExcel(addHours); // base date from Excel serial // Default time to 8:00 AM if not provided
if (!time) {
time = 800;
}
if (time != 0) { // Get base date from Excel serial (this gives you UTC midnight)
// convert the time over to hour and min const date = getJsDateFromExcel(serial);
const localOffset = new Date().getTimezoneOffset() / 60;
const hours = Math.floor(time / 100); const hours = Math.floor(time / 100);
const minutes = time % 100; const minutes = time % 100;
date.setHours(hours);
date.setMinutes(minutes);
}
//console.log(date.toLocaleString("en-US"), getJsDateFromExcel(addHours));
//console.log(serial); // Set the time in UTC
//console.log(date.toISOString()); date.setUTCHours(hours + localOffset);
return date.toISOString(); //.toLocaleString("en-US"); // or .toISOString() if preferred date.setUTCMinutes(minutes);
date.setUTCSeconds(0);
date.setUTCMilliseconds(0);
//console.log(date.toISOString(), serial, time);
return date.toISOString();
}; };

View File

@@ -101,7 +101,7 @@ export const qualityCycle = async () => {
.where(eq(qualityRequest.runningNr, lstQData[i].runningNr)); .where(eq(qualityRequest.runningNr, lstQData[i].runningNr));
createLog( createLog(
"info", "debug",
"lst", "lst",
"quality", "quality",
`Pallet ${lstQData[i].runningNr} was updated`, `Pallet ${lstQData[i].runningNr} was updated`,

View File

@@ -1,7 +1,7 @@
export const inhouseDelivery = ` export const inhouseDelivery = `
declare @shiftStart varchar(max) = (select top(1) CAST(StartDate AS time(0)) from [test1_AlplaPROD2.0_Read].[masterData].[ShiftDefinition] (nolock) order by TeamNumber) declare @shiftStart varchar(max) = (select top(1) CAST(StartDate AS time(0)) from [test1_AlplaPROD2.0_Read].[masterData].[ShiftDefinition] (nolock) order by TeamNumber)
SELECT TOP (1000) SELECT
ProduktionsLos as lot ProduktionsLos as lot
,Menge as qty ,Menge as qty
,Barcode as barcode ,Barcode as barcode

View File

@@ -17,6 +17,10 @@ x.ArtikelVariantenAlias as Alias
,IdProdPlanung as Lot ,IdProdPlanung as Lot
----,IdAdressen, ----,IdAdressen,
,x.AdressBez ,x.AdressBez
,x.IdLagerAbteilung as 'location'
,x.LagerAbteilungKurzBez
,x.IdWarenlager as warehouseID
,x.WarenLagerKurzBez as warehouseName
--,* --,*
from [AlplaPROD_test1].dbo.[V_LagerPositionenBarcodes] (nolock) x from [AlplaPROD_test1].dbo.[V_LagerPositionenBarcodes] (nolock) x
@@ -39,6 +43,10 @@ group by x.idartikelVarianten, ArtikelVariantenAlias, c.Description
--,IdAdressen --,IdAdressen
,x.AdressBez ,x.AdressBez
,IdProdPlanung ,IdProdPlanung
,x.IdLagerAbteilung
,x.LagerAbteilungKurzBez
,x.IdWarenlager
,x.WarenLagerKurzBez
--, x.Lfdnr --, x.Lfdnr
order by x.IdArtikelVarianten order by x.IdArtikelVarianten

View File

@@ -0,0 +1,12 @@
/**
 * SQL probe counting activated features relevant to the new endpoints.
 *
 * Returns one row with `count(*)` over FeatureActivation rows whose feature
 * id is 108 (waste) or 7 (warehousing). Note a bare count cannot tell WHICH
 * of the two is active — only how many of them are.
 *
 * NOTE(review): this targets [test2_AlplaPROD2.0_Read] while the other
 * datamart queries in this changeset use [test1_...] — confirm the database
 * name is intentional and not a stale test reference.
 */
export const featureCheck = `
SELECT count(*)
FROM [test2_AlplaPROD2.0_Read].[support].[FeatureActivation]
where feature in (108,7)
`;
/*
As more features get activated and need to be checked before exposing new
endpoints, add their ids here:
108 = waste
7 = warehousing
*/

View File

@@ -0,0 +1,10 @@
/**
 * SQL template for PSI forecast rows.
 *
 * Contains a literal `[customer]` placeholder that the caller
 * (getGetPSIForecastData) fills in via string replacement — it is NOT a
 * bind parameter, so callers must validate the value before substitution.
 * If the placeholder is left in, the query fails at the SQL server.
 * RequirementDate is pre-formatted server-side as an M/d/yyyy string for
 * the consuming report.
 */
export const forecastData = `
SELECT format(cast(RequirementDate as date),'M/d/yyyy') as requirementDate
,ArticleHumanReadableId
,CustomerArticleNumber
,ArticleDescription
,Quantity
FROM [test1_AlplaPROD2.0_Read].[forecast].[Forecast]
where DeliveryAddressHumanReadableId = [customer]
order by RequirementDate
`;
`;