Compare commits: 8e3d2b3d95 ... 09f16f4e62 (5 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 09f16f4e62 | |
| | 461acb2b16 | |
| | 0d05c66a2b | |
| | 096cc18477 | |
| | f3333ce020 | |
.vscode/settings.json (vendored): 3 lines changed
@@ -54,7 +54,8 @@
     "alplaprod",
     "intiallally",
     "ppoo",
-    "prodlabels"
+    "prodlabels",
+    "rfid"
   ],
   "gitea.token": "8456def90e1c651a761a8711763d6ef225d6b2db",
   "gitea.instanceURL": "https://git.tuffraid.net",
@@ -0,0 +1,20 @@
meta {
  name: PSI - Forecast data
  type: http
  seq: 1
}

get {
  url: {{url}}/lst/old/api/datamart/psiforecastdata?customer=8
  body: none
  auth: inherit
}

params:query {
  customer: 8
}

settings {
  encodeUrl: true
  timeout: 0
}
@@ -0,0 +1,23 @@
meta {
  name: Consume
  type: http
  seq: 1
}

post {
  url: {{url}}/lst/old/api/logistics/consume
  body: json
  auth: inherit
}

body:json {
  {
    "lotNum":283559,
    "runningNr":19302907
  }
}

settings {
  encodeUrl: true
  timeout: 0
}
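For anyone exercising this endpoint outside Bruno, here is a rough TypeScript equivalent of the Consume request above; the base URL stands in for the {{url}} environment variable, and any auth header the collection inherits is omitted:

```ts
// Rough equivalent of the Bruno "Consume" request; illustrative, not part of the commit.
// The base URL replaces Bruno's {{url}} variable; inherited auth is not shown here.
async function consume(baseUrl: string) {
  const res = await fetch(`${baseUrl}/lst/old/api/logistics/consume`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ lotNum: 283559, runningNr: 19302907 }),
  });
  return res.json();
}
```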
@@ -0,0 +1,8 @@
meta {
  name: Consume
  seq: 5
}

auth {
  mode: inherit
}
@@ -0,0 +1,15 @@
meta {
  name: SSCC
  type: http
  seq: 2
}

get {
  url:
  body: none
  auth: inherit
}

settings {
  encodeUrl: true
}
@@ -1,5 +1,5 @@
 vars {
-  url: https://usday1prod.alpla.net
+  url: https://uslim1prod.alpla.net
   session_cookie:
   urlv2: http://usbow1vms006:3000
   jwtV2:
LogisticsSupportTool_API_DOCS/v3endpoints/folder.bru (new file): 8 lines
@@ -0,0 +1,8 @@
meta {
  name: v3endpoints
  seq: 5
}

auth {
  mode: inherit
}
LogisticsSupportTool_API_DOCS/v3endpoints/tester.bru (new file): 16 lines
@@ -0,0 +1,16 @@
meta {
  name: tester
  type: http
  seq: 1
}

post {
  url: http://localhost:3000/lst/api/system/prodsql/start
  body: none
  auth: inherit
}

settings {
  encodeUrl: true
  timeout: 0
}
lstV2/database/migrations/0078_cheerful_the_leader.sql (new file): 2 lines
@@ -0,0 +1,2 @@
ALTER TABLE "invHistoricalData" ADD COLUMN "whse_id" text DEFAULT '';--> statement-breakpoint
ALTER TABLE "invHistoricalData" ADD COLUMN "whse_name" text DEFAULT 'missing whseName';
lstV2/database/migrations/meta/0078_snapshot.json (new file): 2298 lines
(File diff suppressed because it is too large.)
@@ -547,6 +547,13 @@
      "when": 1763407463567,
      "tag": "0077_lucky_texas_twister",
      "breakpoints": true
    },
    {
      "idx": 78,
      "version": "7",
      "when": 1766514890344,
      "tag": "0078_cheerful_the_leader",
      "breakpoints": true
    }
  ]
}
@@ -1,36 +1,38 @@
 import {
-  date,
-  integer,
-  pgTable,
-  text,
-  timestamp,
-  uuid,
+  date,
+  integer,
+  pgTable,
+  text,
+  timestamp,
+  uuid,
 } from "drizzle-orm/pg-core";
 import { createSelectSchema } from "drizzle-zod";

 export const invHistoricalData = pgTable(
-  "invHistoricalData",
-  {
-    inv_id: uuid("inv_id").defaultRandom().primaryKey(),
-    histDate: date("histDate").notNull(), // this date should always be yesterday when we post it.
-    plantToken: text("plantToken"),
-    article: text("article").notNull(),
-    articleDescription: text("articleDescription").notNull(),
-    materialType: text("materialType"),
-    total_QTY: text("total_QTY"),
-    avaliable_QTY: text("avaliable_QTY"),
-    coa_QTY: text("coa_QTY"),
-    held_QTY: text("held_QTY"),
-    lot_Number: text("lot_number"),
-    consignment: text("consignment"),
-    location: text("location"),
-    upd_user: text("upd_user").default("lst"),
-    upd_date: timestamp("upd_date").defaultNow(),
-  }
-  // (table) => [
-  //   // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
-  //   uniqueIndex("role_name").on(table.name),
-  // ]
+  "invHistoricalData",
+  {
+    inv_id: uuid("inv_id").defaultRandom().primaryKey(),
+    histDate: date("histDate").notNull(), // this date should always be yesterday when we post it.
+    plantToken: text("plantToken"),
+    article: text("article").notNull(),
+    articleDescription: text("articleDescription").notNull(),
+    materialType: text("materialType"),
+    total_QTY: text("total_QTY"),
+    avaliable_QTY: text("avaliable_QTY"),
+    coa_QTY: text("coa_QTY"),
+    held_QTY: text("held_QTY"),
+    lot_Number: text("lot_number"),
+    consignment: text("consignment"),
+    location: text("location"),
+    whseId: text("whse_id").default(""),
+    whseName: text("whse_name").default("missing whseName"),
+    upd_user: text("upd_user").default("lst"),
+    upd_date: timestamp("upd_date").defaultNow(),
+  },
+  // (table) => [
+  //   // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
+  //   uniqueIndex("role_name").on(table.name),
+  // ]
 );

 // Schema for inserting a user - can be used to validate API requests
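The schema file imports createSelectSchema from drizzle-zod, so the new columns can also be surfaced through a derived Zod schema. A minimal sketch, assuming the import path and exported names below (they are not code from the commit):

```ts
// Sketch only: deriving a row schema that now includes whseId and whseName.
import { z } from "zod";
import { createSelectSchema } from "drizzle-zod";
import { invHistoricalData } from "./historicalINV.js"; // path assumed

export const invHistoricalDataRow = createSelectSchema(invHistoricalData);
export type InvHistoricalDataRow = z.infer<typeof invHistoricalDataRow>;
```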
lstV2/server/services/dataMart/controller/psiForecastData.ts (new file): 42 lines
@@ -0,0 +1,42 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { forecastData } from "../../sqlServer/querys/psiReport/forecast.js";

// type ArticleData = {
//   id: string
// }
export const getGetPSIForecastData = async (customer: string) => {
  let articles: any = [];
  let queryData = forecastData;
  console.log(customer);
  if (customer) {
    queryData = forecastData.replace("[customer]", customer);
  }

  const { data, error } = (await tryCatch(
    query(queryData, "PSI forecast info"),
  )) as any;

  if (error) {
    createLog(
      "error",
      "datamart",
      "datamart",
      `There was an error getting the forecast info: ${JSON.stringify(error)}`,
    );
    return {
      success: false,
      messsage: `There was an error getting the forecast info`,
      data: error,
    };
  }

  articles = data.data;

  return {
    success: true,
    message: "PSI forecast Data",
    data: articles,
  };
};
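The controller fills a literal [customer] placeholder in the SQL string before handing it to query(). A minimal sketch of that substitution, using a stand-in query text rather than the repository's real one:

```ts
// Illustrative only: how a "[customer]" placeholder in a SQL string gets replaced.
const sampleForecastSql =
  "SELECT * FROM forecast WHERE DeliveryAddressHumanReadableId = [customer]";

const buildForecastQuery = (customer?: string): string =>
  customer ? sampleForecastSql.replace("[customer]", customer) : sampleForecastSql;

console.log(buildForecastQuery("8"));
// -> SELECT * FROM forecast WHERE DeliveryAddressHumanReadableId = 8
```

Because the value is spliced into the SQL text rather than bound as a parameter, the route should only pass validated customer values into this function.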
@@ -1,4 +1,4 @@
-import { and, between, inArray, sql } from "drizzle-orm";
+import { and, between, inArray, notInArray, sql } from "drizzle-orm";
 import { db } from "../../../../database/dbclient.js";
 import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
 import { tryCatch } from "../../../globalUtils/tryCatch.js";
@@ -8,56 +8,79 @@ import { createLog } from "../../logger/logger.js";
 // id: string
 // }
 export const psiGetInventory = async (
-  avs: string,
-  startDate: string,
-  endDate: string
+  avs: string,
+  startDate: string,
+  endDate: string,
+  whseToInclude: string,
+  exludeLanes: string
 ) => {
-  let articles: any = [];
+  let articles: any = [];

-  if (!avs) {
-    return {
-      success: false,
-      message: `Missing av's please send at least one over`,
-      data: [],
-    };
-  }
+  if (!avs) {
+    return {
+      success: false,
+      message: `Missing av's please send at least one over`,
+      data: [],
+    };
+  }

-  const ids = avs.split(",").map((id) => id.trim());
+  const ids = avs.split(",").map((id) => id.trim());
+  const whse = whseToInclude
+    ? whseToInclude
+        .split(",")
+        .map((w) => w.trim())
+        .filter(Boolean)
+    : [];

-  const { data, error } = (await tryCatch(
-    db
-      .select()
-      .from(invHistoricalData)
-      .where(
-        and(
-          inArray(invHistoricalData.article, ids),
-          between(invHistoricalData.histDate, startDate, endDate)
-        )
-      )
-    //.limit(100)
-  )) as any;
+  const locations = exludeLanes
+    ? exludeLanes.split(",").map((l) => l.trim()).filter(Boolean)
+    : [];

-  if (error) {
-    createLog(
-      "error",
-      "datamart",
-      "datamart",
-      `There was an error getting the planning info: ${JSON.stringify(
-        error
-      )}`
-    );
-    return {
-      success: false,
-      messsage: `There was an error getting the planning info`,
-      data: error,
-    };
-  }
+  const conditions = [
+    inArray(invHistoricalData.article, ids),
+    between(invHistoricalData.histDate, startDate, endDate),
+  ];

-  articles = data;
-  console.log(articles.length);
-  return {
-    success: true,
-    message: "PSI planning Data",
-    data: articles,
-  };
+  // only add the warehouse condition if there are any whse values
+  if (whse.length > 0) {
+    console.log("adding whse to include in");
+    conditions.push(inArray(invHistoricalData.whseId, whse));
+  }
+
+  // locations we dont want in the system
+  if (locations.length > 0) {
+    console.log("adding excluded lanes in ",locations);
+
+    conditions.push(notInArray(invHistoricalData.location, locations));
+  }
+
+  const query = db
+    .select()
+    .from(invHistoricalData)
+    .where(and(...conditions));
+
+  // optional tryCatch or await as you had
+  const { data, error } = (await tryCatch(query)) as any;
+
+  if (error) {
+    createLog(
+      "error",
+      "datamart",
+      "datamart",
+      `There was an error getting the planning info: ${JSON.stringify(error)}`,
+    );
+    return {
+      success: false,
+      messsage: `There was an error getting the planning info`,
+      data: error,
+    };
+  }
+
+  articles = data;
+  console.log(articles.length);
+  return {
+    success: true,
+    message: "PSI planning Data",
+    data: articles,
+  };
 };
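The substance of the change is that the WHERE clause is now built from a conditions array, so the warehouse and lane filters are applied only when the caller supplies them. A condensed sketch of that pattern with Drizzle, with import paths assumed and the controller's logging and response shaping omitted:

```ts
// Condensed sketch of the conditions-array pattern; not the full controller.
import { and, between, inArray, notInArray } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js"; // path assumed
import { invHistoricalData } from "../../../../database/schema/historicalINV.js"; // path assumed

export async function queryInventory(
  ids: string[],
  startDate: string,
  endDate: string,
  whse: string[],
  locations: string[],
) {
  // always filter by article and date range
  const conditions = [
    inArray(invHistoricalData.article, ids),
    between(invHistoricalData.histDate, startDate, endDate),
  ];
  // optional filters are pushed in only when values were supplied
  if (whse.length > 0) {
    conditions.push(inArray(invHistoricalData.whseId, whse));
  }
  if (locations.length > 0) {
    conditions.push(notInArray(invHistoricalData.location, locations));
  }
  // and(...conditions) joins every filter with AND
  return db.select().from(invHistoricalData).where(and(...conditions));
}
```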
@@ -13,6 +13,7 @@ import getInhouseDeliveryByDate from "./route/getInHouseDeliveryDateByRange.js";
 import currentInv from "./route/getInventory.js";
 import getOpenOrders from "./route/getOpenOrders.js";
 import psiArticleData from "./route/getPsiArticleData.js";
+import psiForecastData from "./route/getPsiForecast.js";
 import psiInventory from "./route/getPsiinventory.js";
 import psiPlanningData from "./route/getPsiPlanningData.js";
 import psiProductionData from "./route/getPsiProductionData.js";
@@ -37,6 +38,7 @@ const routes = [
   psiPlanningData,
   psiProductionData,
   psiInventory,
+  psiForecastData,
 ] as const;

 const appRoutes = routes.forEach((route) => {
lstV2/server/services/dataMart/route/getPsiForecast.ts (new file): 65 lines
@@ -0,0 +1,65 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { getGetPSIForecastData } from "../controller/psiForecastData.js";

const app = new OpenAPIHono({ strict: false });
const Body = z.object({
  includeRunnningNumbers: z.string().openapi({ example: "x" }),
});
app.openapi(
  createRoute({
    tags: ["dataMart"],
    summary: "Returns the psiforecastdata.",
    method: "get",
    path: "/psiforecastdata",
    request: {
      body: {
        content: {
          "application/json": { schema: Body },
        },
      },
    },
    responses: responses(),
  }),
  async (c) => {
    const customer: any = c.req.queries();

    // make sure we have a vaid user being accessed thats really logged in
    apiHit(c, { endpoint: "/psiforecastdata" });
    //console.log(articles["avs"][0]);

    let customeArticle = null;
    if (customer) {
      customeArticle = customer["customer"][0];
    }
    const { data, error } = await tryCatch(
      getGetPSIForecastData(customeArticle),
    );

    if (error) {
      console.log(error);
      return c.json(
        {
          success: false,
          message: "There was an error getting the articles.",
          data: error,
        },
        400,
      );
    }

    //console.log(data);

    return c.json(
      {
        success: data.success,
        message: data.message,
        data: data.data,
      },
      data.success ? 200 : 400,
    );
  },
);
export default app;
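A hypothetical client call against the new route, assuming it is mounted under the same /lst/old/api/datamart prefix the Bruno request above uses; the base URL is a placeholder:

```ts
// Hypothetical usage of GET /psiforecastdata; not part of the commit.
async function fetchPsiForecast(baseUrl: string, customer: string) {
  const res = await fetch(
    `${baseUrl}/lst/old/api/datamart/psiforecastdata?customer=${customer}`,
  );
  return res.json(); // expected shape: { success, message, data }
}

fetchPsiForecast("http://localhost:3000", "8").then((body) =>
  console.log(body.success, body.message),
);
```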
@@ -1,64 +1,66 @@
 import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
+import { apiHit } from "../../../globalUtils/apiHits.js";
 import { responses } from "../../../globalUtils/routeDefs/responses.js";
 import { tryCatch } from "../../../globalUtils/tryCatch.js";
-import { apiHit } from "../../../globalUtils/apiHits.js";
 import { psiGetInventory } from "../controller/psiGetInventory.js";

 const app = new OpenAPIHono({ strict: false });
 const Body = z.object({
-  includeRunnningNumbers: z.string().openapi({ example: "x" }),
+  includeRunnningNumbers: z.string().openapi({ example: "x" }),
 });
 app.openapi(
-  createRoute({
-    tags: ["dataMart"],
-    summary: "Returns the getPsiinventory.",
-    method: "get",
-    path: "/getpsiinventory",
-    request: {
-      body: {
-        content: {
-          "application/json": { schema: Body },
-        },
-      },
-    },
-    responses: responses(),
-  }),
-  async (c) => {
-    const q: any = c.req.queries();
+  createRoute({
+    tags: ["dataMart"],
+    summary: "Returns the getPsiinventory.",
+    method: "get",
+    path: "/getpsiinventory",
+    request: {
+      body: {
+        content: {
+          "application/json": { schema: Body },
+        },
+      },
+    },
+    responses: responses(),
+  }),
+  async (c) => {
+    const q: any = c.req.queries();

-    // make sure we have a vaid user being accessed thats really logged in
-    apiHit(c, { endpoint: "/getpsiinventory" });
-    //console.log(articles["avs"][0]);
-    const { data, error } = await tryCatch(
-      psiGetInventory(
-        q["avs"] ? q["avs"][0] : null,
-        q["startDate"] ? q["startDate"][0] : null,
-        q["endDate"] ? q["endDate"][0] : null
-      )
-    );
+    // make sure we have a vaid user being accessed thats really logged in
+    apiHit(c, { endpoint: "/getpsiinventory" });
+    //console.log(articles["avs"][0]);
+    const { data, error } = await tryCatch(
+      psiGetInventory(
+        q["avs"] ? q["avs"][0] : null,
+        q["startDate"] ? q["startDate"][0] : null,
+        q["endDate"] ? q["endDate"][0] : null,
+        q["whseToInclude"] ? q["whseToInclude"][0] : null,
+        q["exludeLanes"] ? q["exludeLanes"][0] : null,
+      ),
+    );

-    if (error) {
-      console.log(error);
-      return c.json(
-        {
-          success: false,
-          message: "There was an error getting the production.",
-          data: error,
-        },
-        400
-      );
-    }
+    if (error) {
+      console.log(error);
+      return c.json(
+        {
+          success: false,
+          message: "There was an error getting the production.",
+          data: error,
+        },
+        400,
+      );
+    }

-    //console.log(data);
+    //console.log(data);

-    return c.json(
-      {
-        success: data.success,
-        message: data.message,
-        data: data.data,
-      },
-      data.success ? 200 : 400
-    );
-  }
+    return c.json(
+      {
+        success: data.success,
+        message: data.message,
+        data: data.data,
+      },
+      data.success ? 200 : 400,
+    );
+  },
 );
 export default app;
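The same route can now be called with the two extra query parameters. A hypothetical request, again assuming the /lst/old/api/datamart prefix; the values are placeholders, and exludeLanes mirrors the spelling the route expects:

```ts
// Hypothetical call showing the new whseToInclude and exludeLanes parameters.
async function fetchPsiInventory(baseUrl: string) {
  const params = new URLSearchParams({
    avs: "12345,67890",
    startDate: "2025-01-01",
    endDate: "2025-01-31",
    whseToInclude: "W01,W02", // only rows from these warehouse IDs are kept
    exludeLanes: "LANE-A", // these locations are dropped via notInArray
  });
  const res = await fetch(
    `${baseUrl}/lst/old/api/datamart/getpsiinventory?${params.toString()}`,
  );
  return res.json();
}
```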
@@ -2,65 +2,71 @@ import { OpenAPIHono } from "@hono/zod-openapi";

 const app = new OpenAPIHono();

-import stats from "./route/stats.js";
-import history from "./route/invHistory.js";
-import { createJob } from "../notifications/utils/processNotifications.js";
-import { historicalInvIMmport } from "./utils/historicalInv.js";
 import { tryCatch } from "../../globalUtils/tryCatch.js";
+import { createLog } from "../logger/logger.js";
+import { createJob } from "../notifications/utils/processNotifications.js";
 import { query } from "../sqlServer/prodSqlServer.js";
 import { shiftChange } from "../sqlServer/querys/misc/shiftChange.js";
-import { createLog } from "../logger/logger.js";
+import gpData from "./route/getGpData.js";
 import lastPurch from "./route/getLastPurchPrice.js";
 import lastSales from "./route/getLastSalesPrice.js";
-import gpData from "./route/getGpData.js";
 import consumptionData from "./route/getProductionConsumption.js";
+import purchased from "./route/getPurchased.js";
 import regrind from "./route/getregrind.js";
 import soldItems from "./route/getSoldItems.js";
-import purchased from "./route/getPurchased.js";
+import history from "./route/invHistory.js";
+import stats from "./route/stats.js";
+import { historicalInvIMmport } from "./utils/historicalInv.js";

 const routes = [
-  stats,
-  history,
-  lastPurch,
-  lastSales,
-  gpData,
-  consumptionData,
-  regrind,
-  soldItems,
-  purchased,
+  stats,
+  history,
+  lastPurch,
+  lastSales,
+  gpData,
+  consumptionData,
+  regrind,
+  soldItems,
+  purchased,
 ] as const;

 const appRoutes = routes.forEach((route) => {
-  app.route("/eom", route);
+  app.route("/eom", route);
 });

 setTimeout(async () => {
-  const { data: shift, error: shiftError } = (await tryCatch(
-    query(shiftChange, "shift change from material.")
-  )) as any;
+  const { data: shift, error: shiftError } = (await tryCatch(
+    query(shiftChange, "shift change from material."),
+  )) as any;

-  if (shiftError) {
-    createLog(
-      "error",
-      "eom",
-      "eom",
-      "There was an error getting the shift times will use fallback times"
-    );
-  }
+  if (shiftError) {
+    createLog(
+      "error",
+      "eom",
+      "eom",
+      "There was an error getting the shift times will use fallback times",
+    );
+  }

-  // shift split
-  const shiftTimeSplit = shift?.data[0]?.shiftChange.split(":");
+  // shift split
+  const shiftTimeSplit = shift?.data[0]?.shiftChange.split(":");

-  const cronSetup = `${
-    shiftTimeSplit?.length > 0 ? `${parseInt(shiftTimeSplit[1])}` : "0"
-  } ${
-    shiftTimeSplit?.length > 0 ? `${parseInt(shiftTimeSplit[0])}` : "7"
-  } * * *`;
+  const cronSetup = `${
+    shiftTimeSplit?.length > 0 ? `${parseInt(shiftTimeSplit[1])}` : "0"
+  } ${
+    shiftTimeSplit?.length > 0 ? `${parseInt(shiftTimeSplit[0])}` : "7"
+  } * * *`;

-  //console.log(cronSetup);
-  createJob("eom_historical_inv", cronSetup, historicalInvIMmport);
+  //console.log(cronSetup);
+  createJob("eom_historical_inv", cronSetup, historicalInvIMmport);
 }, 5 * 1000);
 // the time we want to run the hostircal data should be the same time the historical data run on the server
 // getting this from the shift time

+if (process.env.NODE_ENV?.trim() !== "production") {
+  setTimeout(() => {
+    historicalInvIMmport();
+  }, 15 * 1000);
+}

 export default app;
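The cron expression for the eom_historical_inv job is assembled from the shift-change time returned by SQL Server, falling back to 07:00 when the lookup fails. A small sketch of that assembly with an illustrative shift value:

```ts
// Illustrative: turning an "HH:mm:ss" shift-change time into a cron expression,
// with the same 07:00 fallback the service uses when no shift time comes back.
function buildCronFromShift(shiftChangeTime?: string): string {
  const parts = shiftChangeTime ? shiftChangeTime.split(":") : [];
  const minute = parts.length > 0 ? `${parseInt(parts[1], 10)}` : "0";
  const hour = parts.length > 0 ? `${parseInt(parts[0], 10)}` : "7";
  return `${minute} ${hour} * * *`; // minute hour day-of-month month day-of-week
}

console.log(buildCronFromShift("06:30:00")); // "30 6 * * *"
console.log(buildCronFromShift()); // "0 7 * * *"
```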
@@ -76,7 +76,10 @@ export const historicalInvIMmport = async () => {
       coa_QTY: i.COA_QTY,
       held_QTY: i.Held_QTY,
       consignment: i.Consigment,
-      lot_Number: i.lot,
+      lot_Number: i.Lot,
       location: i.location,
+      whseId: i.warehouseID,
+      whseName: i.warehouseName,
     };
   });
@@ -1,4 +1,5 @@
 import { addDays, addHours, isAfter, parse } from "date-fns";
+import { format } from "date-fns-tz";
 import XLSX from "xlsx";
 import { db } from "../../../../../../../database/dbclient.js";
 import { settings } from "../../../../../../../database/schema/settings.js";
@@ -92,7 +93,7 @@ export const abbottOrders = async (data: any, user: any) => {
     orders: [],
   };
   const oOrders: any = openOrders;

   //console.log(orderData);
   let correctedOrders: any = orderData
     .filter(
       (o: any) =>
@@ -147,6 +148,7 @@ export const abbottOrders = async (data: any, user: any) => {

   // Map Excel data to predefinedObject format
   const orders = filterOrders.map((o: any) => {
+    //console.log(o.po, " ", o.date, format(o.date, "M/d/yyyy HH:mm"));
     return {
       customerId: customerID,
       invoiceAddressId: invoiceID,
@@ -157,7 +159,7 @@ export const abbottOrders = async (data: any, user: any) => {
       deliveryAddressId: 8,
       customerArticleNo: o.customerArticlenumber,
       quantity: o.qty,
-      deliveryDate: addHours(addDays(o.date, 1), 1), // adding this in so we can over come the constant 1 day behind thing as a work around
+      deliveryDate: addHours(format(o.date, "M/d/yyyy HH:mm"), 1), //addHours(addDays(o.date, 1), 1), // adding this in so we can over come the constant 1 day behind thing as a work around
       customerLineItemNo: 1, // this is how it is currently sent over from abbott
       customerReleaseNo: 1, // same as above
     },
@@ -165,6 +167,7 @@ export const abbottOrders = async (data: any, user: any) => {
     };
   });

+  //console.log(orders);
   // combine it all together.
   const updatedPredefinedObject = {
     ...predefinedObject,
@@ -1,34 +1,60 @@
 import { getJsDateFromExcel } from "excel-date-to-js";

-export const excelDateStuff = (serial: number, time: any = 0) => {
-  // console.log(serial);
-  // add 5 hours or the offset to utc
+// export const excelDateStuff = (serial: number, time?: any) => {
+//   // add 5 hours or the offset to utc

-  // get the local timezone
-  const localoffset = new Date().getTimezoneOffset() / 60; // then divide by 60 to get the true number;
+//   // get the local timezone
+//   const localoffset = new Date().getTimezoneOffset() / 60; // then divide by 60 to get the true number;

-  if (serial % 1 === 0) {
-    time = 800;
-  }
+//   if (!time) {
+//     time = 800;
+//   }

-  const addHours = serial + localoffset / 24;
-  //console.log(getJsDateFromExcel(addHours));
-  if (typeof serial !== "number" || serial <= 0) {
-    return "invalid Date";
-  }
+//   const addHours = serial + localoffset / 24;
+//   //console.log(getJsDateFromExcel(addHours));
+//   if (typeof serial !== "number" || serial <= 0) {
+//     return "invalid Date";
+//   }

-  const date = getJsDateFromExcel(addHours); // base date from Excel serial
+//   const date = getJsDateFromExcel(addHours); // base date from Excel serial

-  if (time != 0) {
-    // convert the time over to hour and min
-    const hours = Math.floor(time / 100);
-    const minutes = time % 100;
-    date.setHours(hours);
-    date.setMinutes(minutes);
-  }
-  //console.log(date.toLocaleString("en-US"), getJsDateFromExcel(addHours));
+//   if (time != 0) {
+//     // convert the time over to hour and min
+//     const hours = Math.floor(time / 100);
+//     const minutes = time % 100;
+//     date.setHours(hours);
+//     date.setMinutes(minutes);
+//   }
+//   //console.log(date.toLocaleString("en-US"), getJsDateFromExcel(addHours));

-  //console.log(serial);
-  //console.log(date.toISOString());
-  return date.toISOString(); //.toLocaleString("en-US"); // or .toISOString() if preferred
+//   //console.log(serial);
+//   console.log(date.toISOString(), serial, time);
+//   return date.toISOString(); //.toLocaleString("en-US"); // or .toISOString() if preferred
+// };
+
+export const excelDateStuff = (serial: number, time?: any) => {
+  if (typeof serial !== "number" || serial <= 0) {
+    return "invalid Date";
+  }
+
+  // Default time to 8:00 AM if not provided
+  if (!time) {
+    time = 800;
+  }
+
+  // Get base date from Excel serial (this gives you UTC midnight)
+  const date = getJsDateFromExcel(serial);
+
+  const localOffset = new Date().getTimezoneOffset() / 60;
+  const hours = Math.floor(time / 100);
+  const minutes = time % 100;
+
+  // Set the time in UTC
+  date.setUTCHours(hours + localOffset);
+  date.setUTCMinutes(minutes);
+  date.setUTCSeconds(0);
+  date.setUTCMilliseconds(0);
+
+  //console.log(date.toISOString(), serial, time);
+  return date.toISOString();
 };
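A quick illustration of how the rewritten helper behaves; the exact output depends on the machine's UTC offset, which the function adds to the hour before formatting:

```ts
// Illustrative usage of the rewritten excelDateStuff; not part of the commit.
import { excelDateStuff } from "./excelDateStuff.js"; // path assumed

console.log(excelDateStuff(45658)); // serial for 1 Jan 2025, time defaults to 800 (08:00)
console.log(excelDateStuff(45658, 1330)); // same day at 13:30
console.log(excelDateStuff(-5)); // "invalid Date" for non-positive serials
```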
@@ -101,7 +101,7 @@ export const qualityCycle = async () => {
       .where(eq(qualityRequest.runningNr, lstQData[i].runningNr));

     createLog(
       "info",
       "debug",
       "lst",
       "quality",
       `Pallet ${lstQData[i].runningNr} was updated`,
@@ -1,7 +1,7 @@
 export const inhouseDelivery = `
 declare @shiftStart varchar(max) = (select top(1) CAST(StartDate AS time(0)) from [test1_AlplaPROD2.0_Read].[masterData].[ShiftDefinition] (nolock) order by TeamNumber)

-SELECT TOP (1000)
+SELECT
 ProduktionsLos as lot
 ,Menge as qty
 ,Barcode as barcode
@@ -17,6 +17,10 @@ x.ArtikelVariantenAlias as Alias
 ,IdProdPlanung as Lot
 ----,IdAdressen,
 ,x.AdressBez
+,x.IdLagerAbteilung as 'location'
+,x.LagerAbteilungKurzBez
+,x.IdWarenlager as warehouseID
+,x.WarenLagerKurzBez as warehouseName
 --,*
 from [AlplaPROD_test1].dbo.[V_LagerPositionenBarcodes] (nolock) x
@@ -39,6 +43,10 @@ group by x.idartikelVarianten, ArtikelVariantenAlias, c.Description
 --,IdAdressen
 ,x.AdressBez
 ,IdProdPlanung
+,x.IdLagerAbteilung
+,x.LagerAbteilungKurzBez
+,x.IdWarenlager
+,x.WarenLagerKurzBez
 --, x.Lfdnr
 order by x.IdArtikelVarianten
lstV2/server/services/sqlServer/querys/misc/featureCheck.ts (new file): 12 lines
@@ -0,0 +1,12 @@
export const featureCheck = `
SELECT count(*)
  FROM [test2_AlplaPROD2.0_Read].[support].[FeatureActivation]

  where feature in (108,7)
`;

/*
as more features get activated and need to have this checked to include the new endpoints add here so we can check this.
108 = waste
7 = warehousing
*/
lstV2/server/services/sqlServer/querys/psiReport/forecast.ts (new file): 10 lines
@@ -0,0 +1,10 @@
export const forecastData = `
SELECT format(cast(RequirementDate as date),'M/d/yyyy') as requirementDate
  ,ArticleHumanReadableId
  ,CustomerArticleNumber
  ,ArticleDescription
  ,Quantity
FROM [test1_AlplaPROD2.0_Read].[forecast].[Forecast]
where DeliveryAddressHumanReadableId = [customer]
order by RequirementDate
`;