Compare commits

...

8 Commits

71 changed files with 21608 additions and 17626 deletions

1
.gitignore vendored
View File

@@ -194,3 +194,4 @@ controller/Dockerfile
controller/Dockerfile-ignore
controller/docker-compose.yml
controller/index.html
controller/index.html

View File

@@ -43,5 +43,6 @@
},
// Optional: Configure goimports instead of gofmt
"go.formatTool": "goimports"
"go.formatTool": "goimports",
"cSpell.words": ["alpla", "alplamart", "alplaprod", "ppoo"]
}

View File

@@ -0,0 +1,8 @@
meta {
name: admin
seq: 3
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,27 @@
meta {
name: Add Server
type: http
seq: 3
}
post {
url: {{url}}/lst/api/admin/server
body: json
auth: inherit
}
body:json {
{
"name": "Test Server",
"serverDNS": "USMCD1VMS036",
"plantToken": "test3",
"ipAddress": "10.193.0.56",
"greatPlainsPlantCode": 0,
"lstServerPort": 4000,
"serverLoc": "E$\\LST"
}
}
settings {
encodeUrl: true
}

View File

@@ -0,0 +1,15 @@
meta {
name: Get Servers
type: http
seq: 1
}
get {
url: {{url}}/lst/api/admin/server
body: none
auth: inherit
}
settings {
encodeUrl: true
}

View File

@@ -0,0 +1,15 @@
meta {
name: Update Server
type: http
seq: 2
}
patch {
url: {{url}}/lst/api/admin/server
body: none
auth: inherit
}
settings {
encodeUrl: true
}

View File

@@ -0,0 +1,8 @@
meta {
name: server
seq: 2
}
auth {
mode: inherit
}

View File

@@ -4,6 +4,8 @@ import { requireAuth } from "../../pkg/middleware/authMiddleware.js";
//admin routes
import users from "./routes/getUserRoles.js";
import grantRoles from "./routes/grantRole.js";
import servers from "./routes/servers/serverRoutes.js";
import { restrictToHosts } from "../../pkg/middleware/restrictToHosts.js";
export const setupAdminRoutes = (app: Express, basePath: string) => {
app.use(
@@ -16,4 +18,11 @@ export const setupAdminRoutes = (app: Express, basePath: string) => {
requireAuth("user", ["systemAdmin", "admin"]), // will pass bc system admin but this is just telling us we need this
grantRoles
);
app.use(
basePath + "/api/admin/server",
requireAuth("user", ["systemAdmin", "admin"]), // will pass bc system admin but this is just telling us we need this
restrictToHosts(["usmcd1vms036", "USMCD1VMS036"]), // what servers are allowed to see the server section
servers
);
};

View File

@@ -0,0 +1,74 @@
import { Router } from "express";
import type { Request, Response } from "express";
import {
insertServerDataSchema,
serverData,
} from "../../../../pkg/db/schema/servers.js";
import { db } from "../../../../pkg/db/db.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import type { DrizzleError } from "drizzle-orm";
import axios from "axios";
import { createLogger } from "../../../../pkg/logger/logger.js";
const router = Router();

/**
 * POST / — validate and insert a new server record.
 *
 * When running on the dev machine (hostname "localhost") and MAIN_SERVER is
 * configured, the same payload is forwarded to the main server so the new
 * entry is visible there as well. Forwarding failures are logged but do not
 * fail the local insert.
 */
router.post("/", async (req: Request, res: Response) => {
  const log = createLogger({ module: "admin", subModule: "add server" });

  // Validate the request body against the drizzle-zod insert schema.
  const parsed = insertServerDataSchema.safeParse(req.body);
  if (!parsed.success) {
    return res.status(400).json({ errors: parsed.error.flatten() });
  }

  // Insert locally; plantToken carries a unique index, so duplicates
  // surface as an error here rather than silently succeeding.
  const { data, error } = await tryCatch(
    db
      .insert(serverData)
      .values(parsed.data)
      //.onConflictDoNothing()
      .returning({
        name: serverData.name,
        plantToken: serverData.plantToken,
      })
  );

  if (error) {
    const err: DrizzleError = error;
    return res.status(400).json({
      message: `Error adding the server`,
      error: err.cause,
    });
  }

  if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
    log.info({}, "Running in dev server about to add in a new server");
    // Distinct names so the forwarding result does not shadow the insert
    // result used in the 201 response below.
    const { data: fwdData, error: fwdError } = await tryCatch(
      axios.post(
        `${process.env.MAIN_SERVER}/lst/api/admin/server`,
        parsed.data,
        {
          headers: {
            "Content-Type": "application/json",
            Cookie: req.headers.cookie ?? "",
          },
          withCredentials: true,
        }
      )
    );

    if (fwdError) {
      log.error(
        { stack: fwdError },
        "There was an error adding the server to Main Server"
      );
    } else {
      // Only log success when the forward actually succeeded (previously
      // this ran unconditionally, even right after the error log above).
      log.info({ stack: fwdData }, "A new Server was just added to the server.");
    }
  }

  return res
    .status(201)
    .json({ message: `Server ${data[0]?.name} added`, data: data });
});

export default router;

View File

@@ -0,0 +1,25 @@
import { Router } from "express";
import type { Request, Response } from "express";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { and, asc, eq } from "drizzle-orm";
const router = Router();

/**
 * GET / — list all active servers, ordered alphabetically by name.
 * Responds 400 with the raw error when the database query fails.
 */
router.get("/", async (_req: Request, res: Response) => {
  const activeServersQuery = db
    .select()
    .from(serverData)
    .where(eq(serverData.active, true))
    .orderBy(asc(serverData.name));

  const { data: servers, error: dbError } = await tryCatch(activeServersQuery);

  if (dbError) {
    return res.status(400).json({ error: dbError });
  }

  res.status(200).json({ message: "Current Active server", data: servers });
});

export default router;

View File

@@ -0,0 +1,12 @@
import { Router } from "express";
import addServer from "./addServer.js";
import getServers from "./getServers.js";
import updateServer from "./updateServer.js";
const router = Router();

// Each imported handler module exports an express Router; a Router is plain
// middleware, so mounting it under a verb-specific path works — each
// sub-router defines only the matching verb on "/".
router.get("/", getServers); // list active servers
router.post("/", addServer); // create a server
router.patch("/", updateServer); // update a server (currently a stub)

export default router;

View File

@@ -0,0 +1,11 @@
import { Router } from "express";
import type { Request, Response } from "express";
const router = Router();

/**
 * PATCH / — update an existing server record.
 *
 * TODO: implement. Per the original note, when an update is posted from
 * localhost or 127.0.0.1 it should also be forwarded to the test server
 * (insert with update on conflict) so the change is visible there too.
 */
router.patch("/", async (req: Request, res: Response) => {
  // Placeholder response; message corrected from the copy-pasted
  // "Server added" of the add route.
  res.status(200).json({ message: "Server updated" });
});

export default router;

View File

@@ -27,7 +27,9 @@ router.get("/", async (req, res) => {
uptime: process.uptime(),
build: statData[0]?.build,
pendingUpdateFile: await checkBuildUpdate(["."]),
lastUpdate: format(statData[0].lastUpdate!, "MM/dd/yyyy HH:mm"),
lastUpdate: statData[0]?.lastUpdate
? format(statData[0].lastUpdate, "MM/dd/yyyy HH:mm")
: "",
});
});

View File

@@ -0,0 +1,49 @@
import {
boolean,
integer,
pgTable,
text,
timestamp,
uniqueIndex,
uuid,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import z from "zod";
// Registry of LST plant servers. Rows are created/maintained through the
// admin "server" routes; plantToken is the unique per-plant identifier.
export const serverData = pgTable(
  "serverData",
  {
    server_id: uuid("server_id").defaultRandom().primaryKey(),
    name: text("name").notNull(),
    serverDNS: text("serverDNS").notNull(),
    plantToken: text("plantToken").notNull(),
    ipAddress: text("ipAddress").notNull(),
    greatPlainsPlantCode: integer("greatPlainsPlantCode").notNull(),
    // Optional contact / location details for the plant.
    streetAddress: text("streetAddress"),
    cityState: text("cityState"),
    zipcode: integer("zipcode"),
    contactEmail: text("contactEmail"),
    contactPhone: text("contactPhone"),
    customerTiAcc: text("customerTiAcc"),
    lstServerPort: integer("lstServerPort").notNull(),
    // Soft-delete flag: inactive servers are filtered out by the GET route.
    active: boolean("active").default(true),
    // Filesystem location of the LST install on the server (e.g. "E$\\LST").
    serverLoc: text("serverLoc").notNull(),
    lastUpdated: timestamp("lastUpdated").defaultNow(),
    isUpgrading: boolean("isUpgrading").default(false),
  },
  (table) => [
    // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
    // One row per plant: plantToken must be unique.
    uniqueIndex("plantToken").on(table.plantToken),
  ]
);

// Zod schemas derived from the table for validating reads and inserts.
export const selectServerDataSchema = createSelectSchema(serverData);
export const insertServerDataSchema = createInsertSchema(serverData).extend({
  // Tighten contactEmail to a real email format when provided.
  contactEmail: z.email().optional(),
  // zipcode: z
  //   .string()
  //   .regex(/^\d{5}$/)
  //   .optional(),
});

View File

@@ -0,0 +1,30 @@
import type { Request, Response, NextFunction } from "express";
/**
 * Middleware factory that restricts access to localhost or a whitelist of
 * hosts. Matching is case-insensitive, so callers no longer need to list
 * every casing of a DNS name.
 *
 * @param allowedHosts IPs or hostnames permitted in addition to localhost.
 */
export function restrictToHosts(allowedHosts: string[] = []) {
  // Normalize the whitelist once, outside the per-request handler.
  const allowed = new Set(allowedHosts.map((h) => h.toLowerCase()));

  return (req: Request, res: Response, next: NextFunction) => {
    // `req.ip` can be undefined (e.g. socket already closed); treat that as
    // "no match" instead of crashing on `.replace` as the old `req.ip!` did.
    // Strip the IPv4-mapped IPv6 prefix so "::ffff:127.0.0.1" compares
    // as "127.0.0.1".
    const ip = (req.ip ?? "").replace("::ffff:", "").toLowerCase();

    // Express derives req.hostname from the Host header — client-controlled,
    // so this is a convenience filter, not a security boundary on its own.
    const hostname = (req.hostname ?? "").toLowerCase();

    const isLocal =
      ip === "127.0.0.1" || ip === "::1" || hostname === "localhost";

    const isAllowed = isLocal || allowed.has(ip) || allowed.has(hostname);

    if (!isAllowed) {
      return res
        .status(403)
        .json({ error: "Access not allowed from this host" });
    }

    next();
  };
}

View File

@@ -84,7 +84,7 @@ finally {
return
}
server.BroadcastToRoom("/", "update", "updateLogs", fmt.Sprintf("✅ Copy to %s successful", plant))
server.BroadcastToRoom("/", "update", "updateLogs", "done")
}
func lastestBuild() string {

View File

@@ -3,13 +3,15 @@ module lst.net
go 1.24.3
require (
github.com/bwmarrin/discordgo v0.29.0
github.com/gin-gonic/gin v1.10.1
github.com/googollee/go-socket.io v1.7.0
github.com/jackc/pgx/v5 v5.7.6
github.com/joho/godotenv v1.5.1
github.com/robfig/cron/v3 v3.0.0
)
require (
github.com/bwmarrin/discordgo v0.29.0 // indirect
github.com/bytedance/sonic v1.11.6 // indirect
github.com/bytedance/sonic/loader v0.1.1 // indirect
github.com/cloudwego/base64x v0.1.4 // indirect
@@ -29,7 +31,6 @@ require (
github.com/hirochachacha/go-smb2 v1.1.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
github.com/jackc/pgx/v5 v5.7.6 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
@@ -39,7 +40,6 @@ require (
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
github.com/prometheus-community/pro-bing v0.7.0 // indirect
github.com/robfig/cron v1.2.0 // indirect
github.com/robfig/cron/v3 v3.0.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.12 // indirect
golang.org/x/arch v0.8.0 // indirect

View File

@@ -180,22 +180,22 @@
{
name: "Houston",
server: "USHOU1VMS006",
location: "E$\\LST\\LST",
location: "E$\\LST",
},
{
name: "Sherman",
server: "USSHE1VMS006",
location: "E$\\LST\\LST",
location: "E$\\LST",
},
{
name: "West Bend",
server: "USWEB1VMS006",
location: "E$\\LST\\LST",
location: "E$\\LST",
},
{
name: "Jerfferson City",
server: "USJCI1VMS006",
location: "E$\\LST\\LST",
location: "E$\\LST",
},
];
@@ -324,9 +324,7 @@
});
logMessage(
"info",
`Copying to ${
srv.name
} (drive ${srv.drive.toUpperCase()})`
`Copying to ${srv.name} (location ${srv.location})`
);
});
@@ -364,15 +362,13 @@
currentServer = copyQueue.shift();
logMessage(
"info",
`🚀 Copying to ${
currentServer.name
} (drive ${currentServer.drive.toUpperCase()})`
`Copying to ${currentServer.name} (location ${currentServer.location})`
);
socket.emit("update", {
action: "copy",
target: currentServer.name,
drive: currentServer.drive,
target: currentServer.server,
location: currentServer.location,
});
}
@@ -381,7 +377,8 @@
// Only check queue progress if we're in All mode and have a currentServer
if (isRunningAll && currentServer) {
const expected = `✅ Copy to ${currentServer.name} successful`;
//const expected = `✅ Copy to ${currentServer.name} successful`;
const expected = "done";
if (msg.includes(expected)) {
logMessage(

45
controller/load_env.go Normal file
View File

@@ -0,0 +1,45 @@
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"github.com/joho/godotenv"
)

// loadEnv locates and loads the .env file, handling both `go run` and a
// compiled executable.
//
// Heuristic: `go run` builds the binary into the system temp directory, so
// if the executable path lives under os.TempDir() we assume a dev run and
// load ../.env relative to the working directory. Otherwise we try .env
// next to the executable, falling back to ../.env. Failures are printed,
// never fatal.
func loadEnv() {
	// Error deliberately ignored: an empty exePath simply fails the
	// temp-dir prefix check below and we fall through to the exe-dir path.
	exePath, _ := os.Executable()
	exeDir := filepath.Dir(exePath)

	// Normalize both to lowercase absolute paths for Windows safety
	exePathLower := strings.ToLower(exePath)
	tempDirLower := strings.ToLower(filepath.Clean(os.TempDir()))

	// Heuristic: if exe lives *inside* the system temp dir → assume go run
	if strings.HasPrefix(exePathLower, tempDirLower) {
		fmt.Println("Detected go run loading ../.env")
		err := godotenv.Load("../.env")
		if err != nil {
			fmt.Println("ERROR loading .env:", err)
		} else {
			fmt.Println(".env successfully loaded")
		}
		return
	}

	// Otherwise → normal compiled exe
	fmt.Println("Detected compiled exe loading exeDir/.env")
	if err := godotenv.Load(filepath.Join(exeDir, ".env")); err != nil {
		fmt.Println("Didn't find exeDir/.env trying ../.env as fallback")
		err := godotenv.Load("../.env")
		if err != nil {
			fmt.Println("ERROR loading .env:", err)
		} else {
			fmt.Println(".env successfully loaded")
		}
	}
}

View File

@@ -5,26 +5,18 @@ import (
"log"
"net/http"
"os"
"path/filepath"
"strings"
"time"
"github.com/gin-gonic/gin"
socketio "github.com/googollee/go-socket.io"
"github.com/joho/godotenv"
"lst.net/internal/bot"
"lst.net/pkg"
)
func main() {
exePath, _ := os.Executable()
exeDir := filepath.Dir(exePath)
if err := godotenv.Load(filepath.Join(exeDir, ".env")); err != nil {
// fallback dev path
_ = godotenv.Load("../.env")
}
loadEnv()
// gin stuff
basePath := "/api/controller"

View File

@@ -21,6 +21,8 @@ func UpdateServerStats(buildNumber int64) {
os.Getenv("DATABASE_DB"),
)
fmt.Println(url)
ctx := context.Background()
conn, err := pgx.Connect(ctx, url)

View File

@@ -99,6 +99,11 @@ func triggerRemoteUpdate(server *socketio.Server, token string, payload UpdatePa
body, _ := json.Marshal(payload)
url := fmt.Sprintf("https://%vprod.alpla.net%v/update", token, basePath)
if token == "test1" || token == "test2" || token == "test3" {
url = fmt.Sprintf("https://USMCD1vms036.alpla.net/%v/update", basePath)
}
fmt.Println(url)
//url := fmt.Sprintf("http://%v:8080/api/controller/update", remoteURL)
fmt.Println(url)

View File

@@ -0,0 +1,7 @@
{
"label": "End of Month",
"position": 2,
"link": {
"type": "generated-index"
}
}

View File

@@ -0,0 +1,53 @@
---
sidebar_position: 2
id: eom-material-transfer
---
# EOM Material Transfer
## Overview
Below we will describe the process to transfer material to the next month.
This process will help improve the material consistency of our materials and make sure all reporting is as accurate as possible.
This process affects how the data looks in alplamart as well as our KPIs.
Special notes to this process. The eom transfer toggle will only be active for 1 hour on the 1st of the month.
For example, if your plant's shift time is 7am, the toggle will only work from 6am to 7am.
The logic to this process.
When you transfer the material, whether it's to the same lot or a new one, it will be returned to stock; you will see this as a reprint and return to FM1.
after the allowed time is up the material will then be re-consumed automatically to the lot you entered into the form.
## Transferring material
Please head over to lst and click on the materials tab on the left.
If your sidebar is collapsed, it's the icon that looks like a box
![](/img/eom/material-link.png)
once on this page scroll to the bottom.
- Click the actual amount tab
- Toggle the eom transfer
![](/img/eom/eomtransfer.png)
1. Pull the gaylord of material off the line and weigh it
2. Enter the amount into the actual amount field
3. Enter the running number of the label on the gaylord.
- there is no way around this; you will be required to have a running number
4. Enter the lot number you will be transferring the material to
- This can be the same lot you are currently on if it would continue past the allowed time. For example, if the lot runs until 10am on the 1st, you would use the current lot for this transfer
5. Press Transfer to lot

View File

@@ -0,0 +1,68 @@
---
sidebar_position: 1
id: fg-pkg
---
# Finished goods and Packaging counts
## Overview
The EOM-FG-PKG file will be the file you will be sending into finance, this file will include the current inventories as of 7am or the time the shift change is in your plant.
You will need to review all the data and make the necessary adjustments to the file before sending into finance.
NOTE: This includes PPOO adjustments as the data is not capturing ppoo
All adjustments should also be corrected in alplaprod do not just update the file as this data is also pulled into alplamart automatically.
NOTE:
- That all manual changes will be reflected on the next months report in alpla mart there is no way to correct these.
- All manual changes will be overwritten each time the Get Data button is pressed.
## Getting and updating the file
You will need to get the template from the email sent out on the first. please only use the file that is provided via the email to send into finance as this will always be the most updated file.
NOTE: if this file opens in the browser you will need to open in desktop by clicking on the far right
![](/img/eom/openInDesktop.png)
Once you have the file open change please change the plant to your plant and the date to the first of the month
please note that it must be the first day of the month so all the correct data can be pulled into the file; you can still run this throughout the month by entering any date you want to validate or check the inventory on.
![](/img/eom/eom-fg-tab.png)
once you have selected the server and entered the first of the month you can press get data.
This will pull all the necessary data to complete this report. when you validate this data depending on the time of day you run it. it could be off due to the data being historical and only updating to 7am or shift time.
If you want to compare the data you will need to use alplamart to check this under the inventory section.
NOTE that you could be off by 1 pallet due to calculations and timing.
Please review both the FG and Packaging tab to make sure all inventory data is correct.
## Finished goods tab
The finished goods tab does not include ppoo, so you will need to add this in manually and validate that the ppoo is a real pallet.
other things to look at in the FG tab are the pallets on hold and COA hold to make sure you agree with these numbers, as we know sometimes pallets are left on coa hold and not moved to a real hold.
as stated in the overview you will need to make sure if you change the coa to the hold column quality must also do the same.
For consignment, when you make adjustments, make sure you are also doing the consignment invoicing process.
This section will also be semi manual due to customer inventory not being available until the 1st and sometimes after the alplaprod historical updates happen.
## Packaging Tab
![](/img/eom/packaging-tab.png)
- The packaging tab includes the inventory currently in the system as of 7am or shift change time.
- UOM data is pulled from the purchase price, no UOM present the default is 1
- For clarity on this setup and process please see [HERE](https://alpla.sharepoint.com/:p:/r/sites/ITS006931/Purchase%20and%20Incoming%20excel%20files/UoM_Conversion_AP2GP.pptx?d=w24a28265c261497dbab286d1d811a3fb&csf=1&web=1&e=P229Gh)
- If you update the UOM you can rerun the file to get the corrected data in to this file this is not limited to the time frame.
- When manually changing the data enter this into the count column
- This column will be reset on each run, so please consider this when making manual changes

View File

@@ -1,6 +1,6 @@
{
"label": "Install",
"position": 3,
"position": 10,
"link": {
"type": "generated-index"
}

View File

@@ -1,5 +1,6 @@
---
sidebar_position: 2
sidebar_position: 3
id: install-iis
---
# Runing in IIS (Internet Information Services)

View File

@@ -1,5 +1,5 @@
---
sidebar_position: 2
sidebar_position: 1
id: install
---

View File

@@ -1,5 +1,6 @@
---
sidebar_position: 1
sidebar_position: 2
id: install-service
---
# Run as a service
@@ -38,7 +39,7 @@ This will completely stop the service and you will need to start it
## Start the Service
Yet again utlizeing the same script
Yet again utilizing the same script
```bash
.\scripts\services.ps1 -serviceName "LST_app" -option "start" -appPath "E:\LST"

View File

@@ -1,6 +1,6 @@
{
"label": "One Click Print",
"position": 4,
"position": 1,
"link": {
"type": "generated-index"
}

1916
lstDocs/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -15,8 +15,8 @@
"typecheck": "tsc"
},
"dependencies": {
"@docusaurus/core": "3.8.1",
"@docusaurus/preset-classic": "3.8.1",
"@docusaurus/core": "^3.9.1",
"@docusaurus/preset-classic": "^3.9.1",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@@ -24,9 +24,9 @@
"react-dom": "^19.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.8.1",
"@docusaurus/tsconfig": "3.8.1",
"@docusaurus/types": "3.8.1",
"@docusaurus/module-type-aliases": "^3.9.1",
"@docusaurus/tsconfig": "^3.9.1",
"@docusaurus/types": "^3.9.1",
"typescript": "~5.6.2"
},
"browserslist": {

Binary file not shown.

After

Width:  |  Height:  |  Size: 88 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 37 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 61 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

View File

@@ -68,7 +68,7 @@ export const Route = createRootRoute({
<div className="mr-1 ml-1">
{settings.length > 0 && (
<a
href={`https://${server[0].value}.alpla.net/lst/d`}
href={`https://${server[0].value}prod.alpla.net/lst/d`}
target="_blank"
>
LST - Docs |

View File

@@ -1,7 +1,11 @@
import { eq } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { settings } from "../../../../database/schema/settings.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { activeArticle } from "../../sqlServer/querys/dataMart/article.js";
export const getActiveAv = async () => {
export const getActiveAv = async (includePlantToken: boolean = false) => {
let articles: any = [];
try {
const res = await query(activeArticle, "Get active articles");
@@ -10,5 +14,20 @@ export const getActiveAv = async () => {
articles = error;
}
if (includePlantToken) {
const { data, error } = (await tryCatch(
db.select().from(settings).where(eq(settings.name, "plantToken"))
)) as any;
if (error) {
console.log("Error getting articles");
return articles;
}
return articles.map((n: any) => {
return { plantToken: data[0].value, ...n };
});
} else {
return articles;
}
};

View File

@@ -5,7 +5,10 @@ import {
totalInvRn,
} from "../../sqlServer/querys/dataMart/totalINV.js";
export const getINV = async (rn: boolean) => {
export const getINV = async (
rn: boolean,
includePlantToken: boolean = false
) => {
let inventory: any = [];
let updatedQuery = totalInvNoRn;

View File

@@ -22,13 +22,17 @@ app.openapi(
async (c) => {
//const body = await c.req.json();
// make sure we have a vaid user being accessed thats really logged in
const includePlantToken: any = c.req.queries();
apiHit(c, { endpoint: "/getarticles" });
try {
return c.json(
{
success: true,
message: "Current active Articles",
data: await getActiveAv(),
data: await getActiveAv(
includePlantToken["includePlantToken"] ? true : false
),
},
200
);

View File

@@ -3,8 +3,12 @@ import { db } from "../../../../database/dbclient.js";
import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { format } from "date-fns";
import { settings } from "../../../../database/schema/settings.js";
export const historicalInvByDate = async (date: string) => {
export const historicalInvByDate = async (
date: string,
includePlantToken: boolean = false
) => {
const histDate = new Date(date);
const { data, error } = (await tryCatch(
@@ -24,9 +28,28 @@ export const historicalInvByDate = async (date: string) => {
};
}
if (includePlantToken) {
const { data: s, error: se } = (await tryCatch(
db.select().from(settings).where(eq(settings.name, "plantToken"))
)) as any;
if (se) {
console.log("Error getting articles");
return data;
}
return {
success: true,
message: `Historical inventory for ${date}`,
data: data.map((n: any) => {
return { plantToken: s[0].value, ...n };
}),
};
} else {
return {
success: true,
message: `Historical inventory for ${date}`,
data: data,
};
}
};

View File

@@ -2,8 +2,11 @@ import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { format } from "date-fns";
import { query } from "../../sqlServer/prodSqlServer.js";
import { lastPurchasePrice } from "../../sqlServer/querys/eom/lstPurchasePrice.js";
import { db } from "../../../../database/dbclient.js";
import { settings } from "../../../../database/schema/settings.js";
import { eq } from "drizzle-orm";
export const lastPurchase = async () => {
export const lastPurchase = async (includePlantToken: boolean = false) => {
const { data, error } = (await tryCatch(
query(lastPurchasePrice, "Last purchase price")
)) as any;
@@ -16,9 +19,28 @@ export const lastPurchase = async () => {
};
}
if (includePlantToken) {
const { data: s, error: se } = (await tryCatch(
db.select().from(settings).where(eq(settings.name, "plantToken"))
)) as any;
if (se) {
console.log("Error getting articles");
return data.data;
}
return {
success: true,
message: `Last purchase price for all av in the last 5 years`,
data: data.data.map((n: any) => {
return { plantToken: s[0].value, ...n };
}),
};
} else {
return {
success: true,
message: `Last purchase price for all av in the last 5 years`,
data: data.data,
};
}
};

View File

@@ -1,8 +1,15 @@
import { eq } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { settings } from "../../../../database/schema/settings.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { lastSalesPriceCheck } from "../../sqlServer/querys/eom/lastSalesprice.js";
import { format } from "date-fns-tz";
export const lastSales = async (date: string) => {
export const lastSales = async (
date: string,
includePlantToken: boolean = false
) => {
const { data, error } = (await tryCatch(
query(lastSalesPriceCheck.replace("[date]", date), "Last sales price")
)) as any;
@@ -15,9 +22,34 @@ export const lastSales = async (date: string) => {
};
}
if (includePlantToken) {
const { data: s, error: se } = (await tryCatch(
db.select().from(settings).where(eq(settings.name, "plantToken"))
)) as any;
if (se) {
console.log("Error getting articles");
return data.data;
}
return {
success: true,
message: `Historical inventory for ${date}`,
data: data.data.map((n: any) => {
return {
plantToken: s[0].value,
...n,
validDate: format(n.validDate, "M/d/yyyy"),
};
}),
};
} else {
return {
success: true,
message: `Last sales price for all av in the last 5 years`,
data: data.data,
data: data.data.map((n: any) => {
return { ...n, validDate: format(n.validDate, "M/d/yyyy") };
}),
};
}
};

View File

@@ -0,0 +1,67 @@
import { eq } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { settings } from "../../../../database/schema/settings.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { lastSalesPriceCheck } from "../../sqlServer/querys/eom/lastSalesprice.js";
import { consumptionCheck } from "../../sqlServer/querys/eom/consumptionCheck.js";
import { format } from "date-fns-tz";
/** Date range plus token flag for an EOM consumption query. */
type Consumption = {
  startDate: string;
  endDate: string;
  includePlantToken: boolean;
};

/**
 * Returns production consumption rows between startDate and endDate.
 *
 * When includePlantToken is set, the plant's token (from the settings table)
 * is prepended to every row so a central consumer can tell plants apart.
 * Prod_Date is normalized to M/d/yyyy in all cases.
 */
export const getProductionConsumption = async (consumption: Consumption) => {
  const { data, error } = (await tryCatch(
    query(
      consumptionCheck
        .replace("[startDate]", consumption.startDate)
        .replace("[endDate]", consumption.endDate),
      "Production consumption" // was a copy-pasted "Last sales price" label
    )
  )) as any;

  if (error) {
    return {
      success: false,
      // was "Error getting the last sales price" — copy-paste from lastSales
      message: "Error getting the consumption data",
      data: error,
    };
  }

  // Single formatting pass; optionally prefixes each row with the token.
  const formatRow = (n: any, plantToken?: string) => ({
    ...(plantToken !== undefined ? { plantToken } : {}),
    ...n,
    Prod_Date: format(n.Prod_Date, "M/d/yyyy"),
  });

  if (consumption.includePlantToken) {
    const { data: s, error: se } = (await tryCatch(
      db.select().from(settings).where(eq(settings.name, "plantToken"))
    )) as any;
    if (se) {
      console.log("Error getting the plantToken setting");
      // Keep the {success, message, data} response shape consistent instead
      // of returning the raw row array as the original did.
      return {
        success: true,
        message: `consumption data`,
        data: data.data.map((n: any) => formatRow(n)),
      };
    }
    return {
      success: true,
      message: `consumption data`,
      data: data.data.map((n: any) => formatRow(n, s[0].value)),
    };
  }

  return {
    success: true,
    message: `consumption data`,
    data: data.data.map((n: any) => formatRow(n)),
  };
};

View File

@@ -0,0 +1,66 @@
import { eq } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { settings } from "../../../../database/schema/settings.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { format } from "date-fns-tz";
import { purchased } from "../../sqlServer/querys/eom/purchased.js";
/** Date range plus token flag for an EOM purchased-materials query. */
type Consumption = {
  startDate: string;
  endDate: string;
  includePlantToken: boolean;
};

/**
 * Returns purchased/received material rows between startDate and endDate.
 *
 * When includePlantToken is set, the plant's token (from the settings table)
 * is prepended to every row. Received_Date is normalized to M/d/yyyy.
 */
export const getPurchased = async (consumption: Consumption) => {
  const { data, error } = (await tryCatch(
    query(
      purchased
        .replace("[startDate]", consumption.startDate)
        .replace("[endDate]", consumption.endDate),
      "Purchased materials" // was a copy-pasted "Last sales price" label
    )
  )) as any;

  if (error) {
    return {
      success: false,
      // was "Error getting the last sales price" — copy-paste from lastSales
      message: "Error getting the purchased data",
      data: error,
    };
  }

  // Single formatting pass; optionally prefixes each row with the token.
  const formatRow = (n: any, plantToken?: string) => ({
    ...(plantToken !== undefined ? { plantToken } : {}),
    ...n,
    Received_Date: format(n.Received_Date, "M/d/yyyy"),
  });

  if (consumption.includePlantToken) {
    const { data: s, error: se } = (await tryCatch(
      db.select().from(settings).where(eq(settings.name, "plantToken"))
    )) as any;
    if (se) {
      console.log("Error getting the plantToken setting");
      // Keep the {success, message, data} response shape consistent instead
      // of returning the raw row array as the original did.
      return {
        success: true,
        message: `consumption data`,
        data: data.data.map((n: any) => formatRow(n)),
      };
    }
    return {
      success: true,
      message: `consumption data`,
      data: data.data.map((n: any) => formatRow(n, s[0].value)),
    };
  }

  return {
    success: true,
    message: `consumption data`,
    data: data.data.map((n: any) => formatRow(n)),
  };
};

View File

@@ -0,0 +1,68 @@
import { eq } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { settings } from "../../../../database/schema/settings.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { format } from "date-fns-tz";
import { soldOutItems } from "../../sqlServer/querys/eom/soldOut.js";
/** Date range plus token flag for an EOM sold-items query. */
type Consumption = {
  startDate: string;
  endDate: string;
  includePlantToken: boolean;
};

/**
 * Returns items sold/delivered between startDate and endDate.
 *
 * When includePlantToken is set, the plant's token (from the settings table)
 * is prepended to every row. DeliveryDate is normalized to M/d/yyyy.
 */
export const getSoldItems = async (consumption: Consumption) => {
  const { data, error } = (await tryCatch(
    query(
      soldOutItems
        .replace("[startDate]", consumption.startDate)
        .replace("[endDate]", consumption.endDate),
      "Sold items" // was a copy-pasted "Last sales price" label
    )
  )) as any;

  if (error) {
    return {
      success: false,
      // was "Error getting the last sales price" — copy-paste from lastSales
      message: "Error getting the sold items",
      data: error,
    };
  }

  // Single formatting pass; optionally prefixes each row with the token.
  const formatRow = (n: any, plantToken?: string) => ({
    ...(plantToken !== undefined ? { plantToken } : {}),
    ...n,
    DeliveryDate: format(n.DeliveryDate, "M/d/yyyy"),
  });

  if (consumption.includePlantToken) {
    const { data: s, error: se } = (await tryCatch(
      db.select().from(settings).where(eq(settings.name, "plantToken"))
    )) as any;
    if (se) {
      console.log("Error getting the plantToken setting");
      // Keep the {success, message, data} response shape consistent instead
      // of returning the raw row array as the original did.
      return {
        success: true,
        message: `consumption data`,
        data: data.data.map((n: any) => formatRow(n)),
      };
    }
    return {
      success: true,
      message: `consumption data`,
      data: data.data.map((n: any) => formatRow(n, s[0].value)),
    };
  }

  return {
    success: true,
    message: `consumption data`,
    data: data.data.map((n: any) => formatRow(n)),
  };
};

View File

@@ -0,0 +1,68 @@
import { eq } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { settings } from "../../../../database/schema/settings.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { lastSalesPriceCheck } from "../../sqlServer/querys/eom/lastSalesprice.js";
import { consumptionCheck } from "../../sqlServer/querys/eom/consumptionCheck.js";
import { format } from "date-fns-tz";
import { regrindCheck } from "../../sqlServer/querys/eom/regrind.js";
/** Date range plus token flag for an EOM regrind query. */
type Consumption = {
  startDate: string;
  endDate: string;
  includePlantToken: boolean;
};

/**
 * Returns regrind booking rows between startDate and endDate.
 *
 * When includePlantToken is set, the plant's token (from the settings table)
 * is prepended to every row. Buchungsdatum (booking date) is normalized to
 * M/d/yyyy.
 */
export const getRegrind = async (consumption: Consumption) => {
  const { data, error } = (await tryCatch(
    query(
      regrindCheck
        .replace("[startDate]", consumption.startDate)
        .replace("[endDate]", consumption.endDate),
      "Regrind" // was a copy-pasted "Last sales price" label
    )
  )) as any;

  if (error) {
    return {
      success: false,
      // was "Error getting the last sales price" — copy-paste from lastSales
      message: "Error getting the regrind data",
      data: error,
    };
  }

  // Single formatting pass; optionally prefixes each row with the token.
  const formatRow = (n: any, plantToken?: string) => ({
    ...(plantToken !== undefined ? { plantToken } : {}),
    ...n,
    Buchungsdatum: format(n.Buchungsdatum, "M/d/yyyy"),
  });

  if (consumption.includePlantToken) {
    const { data: s, error: se } = (await tryCatch(
      db.select().from(settings).where(eq(settings.name, "plantToken"))
    )) as any;
    if (se) {
      console.log("Error getting the plantToken setting");
      // Keep the {success, message, data} response shape consistent instead
      // of returning the raw row array as the original did.
      return {
        success: true,
        message: `consumption data`,
        data: data.data.map((n: any) => formatRow(n)),
      };
    }
    return {
      success: true,
      message: `consumption data`,
      data: data.data.map((n: any) => formatRow(n, s[0].value)),
    };
  }

  return {
    success: true,
    message: `consumption data`,
    data: data.data.map((n: any) => formatRow(n)),
  };
};

View File

@@ -12,8 +12,23 @@ import { shiftChange } from "../sqlServer/querys/misc/shiftChange.js";
import { createLog } from "../logger/logger.js";
import lastPurch from "./route/getLastPurchPrice.js";
import lastSales from "./route/getLastSalesPrice.js";
import gpData from "./route/getGpData.js";
import consumptionData from "./route/getProductionConsumption.js";
import regrind from "./route/getregrind.js";
import soldItems from "./route/getSoldItems.js";
import purchased from "./route/getPurchased.js";
const routes = [stats, history, lastPurch, lastSales] as const;
const routes = [
stats,
history,
lastPurch,
lastSales,
gpData,
consumptionData,
regrind,
soldItems,
purchased,
] as const;
const appRoutes = routes.forEach((route) => {
app.route("/eom", route);

View File

@@ -0,0 +1,51 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { runGPQuery } from "../../sqlServer/gpSqlServer.js";
const app = new OpenAPIHono({ strict: false });

/**
 * GET /gpData?startDate=...&endDate=...&gpCode=...
 * Returns Great Plains receiving data for the (inclusive) date range at the
 * given plant location code.
 */
app.openapi(
  createRoute({
    tags: ["eom"],
    // BUG FIX: summary was copy-pasted from the history route.
    summary: "Gets Great Plains receiving data for a date range.",
    method: "get",
    path: "/gpData",
    responses: responses(),
  }),
  async (c) => {
    // c.req.queries() returns a string[] per key; take the first value.
    const q: any = c.req.queries();
    apiHit(c, { endpoint: "/gpData" });
    try {
      const res = await runGPQuery({
        startDate: q["startDate"] ? q["startDate"][0] : "",
        endDate: q["endDate"] ? q["endDate"][0] : "",
        // BUG FIX: gpCode was passed as the raw string[] from queries();
        // runGPQuery expects a single string.
        gpCode: q["gpCode"] ? q["gpCode"][0] : "",
      });
      return c.json(
        { success: res.success, message: res.message, data: res.data },
        200
      );
    } catch (error) {
      return c.json(
        {
          success: false,
          message: "There was an error getting gp data.",
          data: error,
        },
        400
      );
    }
  }
);
export default app;

View File

@@ -17,10 +17,12 @@ app.openapi(
async (c) => {
//const body = await c.req.json();
// make sure we have a vaid user being accessed thats really logged in
const q: any = c.req.queries();
apiHit(c, { endpoint: "/lastpurchprice" });
try {
const res = await lastPurchase();
const res = await lastPurchase(
q["includePlantToken"] ? true : false
);
return c.json(
{ success: res.success, message: res.message, data: res.data },

View File

@@ -17,12 +17,14 @@ app.openapi(
}),
async (c) => {
//const body = await c.req.json();
const month: string = c.req.query("month") ?? "";
// make sure we have a vaid user being accessed thats really logged in
const q: any = c.req.queries();
apiHit(c, { endpoint: "/lastsalesprice" });
try {
const res = await lastSales(month);
const res = await lastSales(
q["month"] ? q["month"][0] : null,
q["includePlantToken"] ? true : false
);
return c.json(
{ success: res.success, message: res.message, data: res.data },

View File

@@ -0,0 +1,52 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { runGPQuery } from "../../sqlServer/gpSqlServer.js";
import { getProductionConsumption } from "../controller/getProductionConsumption.js";
const app = new OpenAPIHono({ strict: false });

/**
 * GET /productionconsumption?startDate=...&endDate=...&includePlantToken=1
 * Returns production consumption rows for the (inclusive) date range.
 */
app.openapi(
  createRoute({
    tags: ["eom"],
    // BUG FIX: summary was copy-pasted from the history route.
    summary: "Gets production consumption data for a date range.",
    method: "get",
    path: "/productionconsumption",
    responses: responses(),
  }),
  async (c) => {
    // c.req.queries() returns a string[] per key; take the first value.
    const q: any = c.req.queries();
    // BUG FIX: endpoint label was copy-pasted as "/gpData", corrupting the
    // api-hit metrics for this route.
    apiHit(c, { endpoint: "/productionconsumption" });
    try {
      const res = await getProductionConsumption({
        startDate: q["startDate"] ? q["startDate"][0] : "",
        endDate: q["endDate"] ? q["endDate"][0] : "",
        includePlantToken: q["includePlantToken"] ? true : false,
      });
      return c.json(
        { success: res.success, message: res.message, data: res.data },
        200
      );
    } catch (error) {
      return c.json(
        {
          success: false,
          // BUG FIX: message previously said "gp data" (copy-paste).
          message: "There was an error getting production consumption data.",
          data: error,
        },
        400
      );
    }
  }
);
export default app;

View File

@@ -0,0 +1,50 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { getPurchased } from "../controller/getPurchased.js";
const app = new OpenAPIHono({ strict: false });

/**
 * GET /purchased?startDate=...&endDate=...&includePlantToken=1
 * Returns purchased / incoming-goods rows for the (inclusive) date range.
 */
app.openapi(
  createRoute({
    tags: ["eom"],
    // BUG FIX: summary was copy-pasted from the history route.
    summary: "Gets purchased goods data for a date range.",
    method: "get",
    path: "/purchased",
    responses: responses(),
  }),
  async (c) => {
    // c.req.queries() returns a string[] per key; take the first value.
    const q: any = c.req.queries();
    // BUG FIX: endpoint label was copy-pasted as "/regrind", corrupting the
    // api-hit metrics for this route.
    apiHit(c, { endpoint: "/purchased" });
    try {
      const res = await getPurchased({
        startDate: q["startDate"] ? q["startDate"][0] : "",
        endDate: q["endDate"] ? q["endDate"][0] : "",
        includePlantToken: q["includePlantToken"] ? true : false,
      });
      return c.json(
        { success: res.success, message: res.message, data: res.data },
        200
      );
    } catch (error) {
      return c.json(
        {
          success: false,
          // BUG FIX: message previously said "gp data" (copy-paste).
          message: "There was an error getting purchased data.",
          data: error,
        },
        400
      );
    }
  }
);
export default app;

View File

@@ -0,0 +1,54 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { runGPQuery } from "../../sqlServer/gpSqlServer.js";
import { getProductionConsumption } from "../controller/getProductionConsumption.js";
import { getRegrind } from "../controller/getregrind.js";
import { getSoldItems } from "../controller/getSoldItems.js";
const app = new OpenAPIHono({ strict: false });

/**
 * GET /solditems?startDate=...&endDate=...&includePlantToken=1
 * Returns sold / delivered item rows for the (inclusive) date range.
 */
app.openapi(
  createRoute({
    tags: ["eom"],
    // BUG FIX: summary was copy-pasted from the history route.
    summary: "Gets sold items data for a date range.",
    method: "get",
    path: "/solditems",
    responses: responses(),
  }),
  async (c) => {
    // c.req.queries() returns a string[] per key; take the first value.
    const q: any = c.req.queries();
    // BUG FIX: endpoint label was copy-pasted as "/regrind", corrupting the
    // api-hit metrics for this route.
    apiHit(c, { endpoint: "/solditems" });
    try {
      const res = await getSoldItems({
        startDate: q["startDate"] ? q["startDate"][0] : "",
        endDate: q["endDate"] ? q["endDate"][0] : "",
        includePlantToken: q["includePlantToken"] ? true : false,
      });
      return c.json(
        { success: res.success, message: res.message, data: res.data },
        200
      );
    } catch (error) {
      return c.json(
        {
          success: false,
          // BUG FIX: message previously said "gp data" (copy-paste).
          message: "There was an error getting sold items data.",
          data: error,
        },
        400
      );
    }
  }
);
export default app;

View File

@@ -0,0 +1,53 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { runGPQuery } from "../../sqlServer/gpSqlServer.js";
import { getProductionConsumption } from "../controller/getProductionConsumption.js";
import { getRegrind } from "../controller/getregrind.js";
const app = new OpenAPIHono({ strict: false });

/**
 * GET /regrind?startDate=...&endDate=...&includePlantToken=1
 * Returns regrind booking rows for the (inclusive) date range.
 */
app.openapi(
  createRoute({
    tags: ["eom"],
    // BUG FIX: summary was copy-pasted from the history route.
    summary: "Gets regrind data for a date range.",
    method: "get",
    path: "/regrind",
    responses: responses(),
  }),
  async (c) => {
    // c.req.queries() returns a string[] per key; take the first value.
    const q: any = c.req.queries();
    apiHit(c, { endpoint: "/regrind" });
    try {
      const res = await getRegrind({
        startDate: q["startDate"] ? q["startDate"][0] : "",
        endDate: q["endDate"] ? q["endDate"][0] : "",
        includePlantToken: q["includePlantToken"] ? true : false,
      });
      return c.json(
        { success: res.success, message: res.message, data: res.data },
        200
      );
    } catch (error) {
      return c.json(
        {
          success: false,
          // BUG FIX: message previously said "gp data" (copy-paste).
          message: "There was an error getting regrind data.",
          data: error,
        },
        400
      );
    }
  }
);
export default app;

View File

@@ -21,11 +21,14 @@ app.openapi(
async (c) => {
//const body = await c.req.json();
// make sure we have a vaid user being accessed thats really logged in
const month: string = c.req.query("month") ?? "";
const q: any = c.req.queries();
apiHit(c, { endpoint: "/histinv" });
try {
const res = await historicalInvByDate(month);
const res = await historicalInvByDate(
q["month"] ? q["month"][0] : null,
q["includePlantToken"] ? true : false
);
return c.json(
{ success: res.success, message: res.message, data: res.data },

View File

@@ -85,16 +85,16 @@ export const labelingProcess = async ({
(l: any) => l.MachineID === macId[0]?.HumanReadableId
);
if (filteredLot.length === 0) {
if (!filteredLot || filteredLot.length === 0) {
createLog(
"error",
"labeling",
"ocp",
`There is not a lot assigned to ${line}.`
`There is not a lot assigned to ${zechette.line}.`
);
return {
success: false,
message: `There is not a lot assigned to ${line}.`,
message: `There is not a lot assigned to ${zechette.line}.`,
};
}

View File

@@ -0,0 +1,104 @@
import sql from "mssql";
import { tryCatch } from "../../globalUtils/tryCatch.js";
import { db } from "../../../database/dbclient.js";
import { settings } from "../../../database/schema/settings.js";
import { eq } from "drizzle-orm";
import { format } from "date-fns-tz";
// SECURITY(review): database credentials are hard-coded in source control.
// They should be moved to environment variables / secret storage and the
// password rotated, since it is now exposed in the repository history.
const username = "gpviewer";
const password = "gp$$ViewOnly!";
// Connection settings for the Great Plains SQL Server instance.
const sqlGPConfig = {
  server: "USMCD1VMS011",
  database: `ALPLA`,
  user: username,
  password: password,
  options: {
    encrypt: true,
    // NOTE(review): trusting the server certificate disables TLS identity
    // verification; acceptable only on a trusted internal network.
    trustServerCertificate: true,
  },
  requestTimeout: 90000, // 90s -- GP reporting queries can be slow
};
// Inclusive date range plus the GP location-code prefix to filter on.
type GPCheck = {
  startDate: string;
  endDate: string;
  gpCode: string;
};
export const runGPQuery = async (gpCheck: GPCheck) => {
let pool2: sql.ConnectionPool | null = null;
try {
// Create a brand-new pool, not touching the "global" one
pool2 = new sql.ConnectionPool(sqlGPConfig);
await pool2.connect();
const query = `
select * from (
select
case when x.POPRCTNM is null then p.POPRCTNM else p.POPRCTNM end as RCT_Num,
PONUMBER PO,
p.VENDORID Supplier,
ITEMNMBR Item,
QTYSHPPD shipped,
UOFM Type,
TRXLOCTN Location,
case when CONVERT(DATE, x.receiptdate) is null then convert(date, p.DATERECD) else CONVERT(DATE, x.receiptdate) end as Date_Recived
from ALPLA.dbo.pop10500 (nolock) as p
left join
ALPLA.dbo.POP10300 as x on p.POPRCTNM = x.POPRCTNM
WHERE TRXLOCTN LIKE '[gpCode]%' and p.POPTYPE = 1) a
where Date_Recived BETWEEN '[startDate]' AND '[endDate]'
`;
const { data: s, error: se } = (await tryCatch(
db.select().from(settings).where(eq(settings.name, "plantToken"))
)) as any;
if (se) {
console.log("Error getting articles");
return se;
}
const result = await pool2
.request()
.query(
query
.replace("[startDate]", gpCheck.startDate)
.replace("[endDate]", gpCheck.endDate)
.replace("[gpCode]", gpCheck.gpCode)
);
return {
success: true,
message: "GP data",
data: result.recordset.map((n: any) => {
return {
plantToken: s[0].value,
...n,
RCT_Num: n.RCT_Num.trim(),
PO: n.PO.trim(),
Supplier: n.Supplier.trim(),
Item: n.Item.trim(),
article:
n.Item.split("-").length > 1
? n.Item.split("-")[1].trim()
: "No article",
Type: n.Type.trim(),
Location: n.Location.trim(),
Date_Recived: format(n.Date_Recived, "M/d/yyyy"),
};
}),
};
} catch (error) {
console.log(error);
return {
success: false,
message: "Error Getting GP data",
data: error,
};
} finally {
if (pool2) await pool2.close(); // Always close the pool
}
};

View File

@@ -0,0 +1,7 @@
// Production consumption bookings (T_LBW) per article variant.
// [startDate] / [endDate] are plain-text placeholders that the caller
// substitutes before execution -- they are NOT SQL parameters, so untrusted
// text must never be passed into them.
export const consumptionCheck = `
SELECT IdArtikelvarianten AS AV,
Menge AS Quantity,
CONVERT(DATE, BuchDatum) AS Prod_Date
FROM alplaprod_test1.dbo.T_LBW (nolock)
WHERE BuchDatum BETWEEN '[startDate]' AND '[endDate]' ORDER BY BuchDatum DESC
`;

View File

@@ -0,0 +1,42 @@
// Purchased / incoming-goods receipts for a date range, joined to article
// and supplier master data; rows from ALPLA-owned addresses are tagged
// 'AlplaPlant', all others 'Supplier'.
// [startDate] / [endDate] are plain-text placeholders substituted by the
// caller (NOT SQL parameters). The trailing space inside '[startDate] ' and
// '[endDate] ' is intentional: the shift-1 start time is concatenated on,
// so the window runs from shift start on startDate to shift start on
// endDate.
export const purchased = `
use AlplaPROD_test1
declare @start_date nvarchar(30) = '[startDate] '
declare @end_date nvarchar(30) = '[endDate] '
select T_Wareneingaenge.IdBestellung AS Purchase_order,
T_Adressen.IdAdressen,
T_Adressen.Bezeichnung,
T_Wareneingaenge.IdArtikelVarianten AS AV,
V_Artikel.Alias,
x.Bemerkung AS Remark,
T_Wareneingaenge.Bemerkung AS Purchase_Remark,
x.Add_User,
CONVERT(DATE, x.Add_Date) AS Received_Date,
x.IdWareneingangPlanung,
T_Wareneingaenge.SollMenge As Ordered_QTY,
x.EntladeMenge As Received_QTY,
case when T_Adressen.Bezeichnung LIKE '%Alpla%' Then 'AlplaPlant' Else 'Supplier' End AS
Supplier,
x.Typ as incoming_goods_type
from dbo.T_WareneingangPlanungen (nolock) as x
join
dbo.T_Wareneingaenge (nolock) on
x.IdWareneingang=
dbo.T_Wareneingaenge.IdWareneingang
join
dbo.V_Artikel (nolock) on
dbo.T_Wareneingaenge.IdArtikelVarianten=
dbo.V_Artikel.IdArtikelvarianten
join
dbo.T_Adressen (nolock) on dbo.T_Wareneingaenge.IdLieferantAdresse =
dbo.T_Adressen.IdAdressen
where x.add_date between @start_date + (select top(1) CONVERT(char(8), StartDate, 108) as startTime from [test1_AlplaPROD2.0_Read].masterData.ShiftDefinition (nolock) where TeamNumber = 1)
AND @end_date + (select top(1) CONVERT(char(8), StartDate, 108) as startTime from [test1_AlplaPROD2.0_Read].masterData.ShiftDefinition (nolock) where TeamNumber = 1)
order by x.add_date desc
`;

View File

@@ -0,0 +1,15 @@
// Regrind stock bookings (V_AbfallLagerBuchungen) for a date range.
// [startDate] / [endDate] are plain-text placeholders substituted by the
// caller (NOT SQL parameters). The trailing space inside '[startDate] ' and
// '[endDate] ' is intentional: the shift-1 start time is concatenated on,
// so the window runs shift-start to shift-start.
// IdBuchungsGrund 140/240 with BuchungsTyp = 1 selects the relevant regrind
// booking reasons -- TODO confirm reason-code meanings against MES master
// data.
export const regrindCheck = `
select IdArtikelVarianten,
ArtikelVariantenAlias,
IdRezeptur,
Menge,
IdBuchungsGrund,
Buchungsdatum,
ProduktionsLos,
IdReinheit,
ReinheitBez, HerkunftBez
from alplaprod_test1.[dbo].[V_AbfallLagerBuchungen] (nolock)
where Buchungsdatum between '[startDate] ' + (select top(1) CONVERT(char(8), StartDate, 108) as startTime from [test1_AlplaPROD2.0_Read].masterData.ShiftDefinition (nolock) where TeamNumber = 1)
and '[endDate] ' + (select top(1) CONVERT(char(8), StartDate, 108) as startTime from [test1_AlplaPROD2.0_Read].masterData.ShiftDefinition (nolock) where TeamNumber = 1)
and IdBuchungsGrund in (140, 240) and BuchungsTyp = 1
`;

View File

@@ -0,0 +1,17 @@
// Sold / delivered order releases (V_TrackerAuftragsAbrufe) for a date
// range; deliveries to ALPLA addresses are tagged 'AlplaPlant', all others
// 'Customer'.
// [startDate] / [endDate] are plain-text placeholders substituted by the
// caller before execution (NOT SQL parameters), so untrusted text must
// never be passed into them.
export const soldOutItems = `
select IdArtikelVarianten AS AV,
ArtikelVariantenAlias AS AVDescription,
convert(date,AbrufLadeDatum,23) As DeliveryDate,
idlieferadresse AS DeliveryAddress,
LieferAdressBez,
AuftragsNummer AS PO_Number,
IdAuftragsPosition AS LineITEM,
IdAuftragsAbruf AS ReleaseNumber,
AbrufMengeVPK AS PalletsRequested,
AbrufMenge AS PiecesRequested,
GelieferteMengeVPK AS DeliveredPallets,
GelieferteMenge AS DeliveredQTY,
case when LieferAdressBez Like '%alpla%' Then 'AlplaPlant' ELSE 'Customer' End as CustomerType
from alplaprod_test1.dbo.V_TrackerAuftragsAbrufe (nolock)
where AbrufLadeDatum between '[startDate]' and '[endDate]'`;

View File

@@ -0,0 +1,21 @@
-- Drizzle migration: one row per plant/LST server (connection info, GP plant
-- code, contact details, and active/upgrading status flags).
-- NOTE(review): generated migration -- comments only; do not change the DDL
-- of an already-applied migration.
CREATE TABLE "serverData" (
	"server_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
	"name" text NOT NULL,
	"serverDNS" text,
	"plantToken" text,
	"ipAddress" text,
	"greatPlainsPlantCode" numeric,
	"streetAddress" text,
	"cityState" text,
	"zipcode" numeric,
	"contactEmail" text,
	"contactPhone" text,
	"customerTiAcc" text,
	"lstServerPort" numeric,
	"active" boolean DEFAULT true,
	"serverLoc" text,
	"lastUpdated" timestamp DEFAULT now(),
	"isUpgrading" boolean DEFAULT false
);
--> statement-breakpoint
-- Each plant token identifies exactly one server.
CREATE UNIQUE INDEX "plantToken" ON "serverData" USING btree ("plantToken");

View File

@@ -0,0 +1,9 @@
ALTER TABLE "serverData" ALTER COLUMN "serverDNS" SET NOT NULL;--> statement-breakpoint
ALTER TABLE "serverData" ALTER COLUMN "plantToken" SET NOT NULL;--> statement-breakpoint
ALTER TABLE "serverData" ALTER COLUMN "ipAddress" SET NOT NULL;--> statement-breakpoint
ALTER TABLE "serverData" ALTER COLUMN "greatPlainsPlantCode" SET DATA TYPE integer;--> statement-breakpoint
ALTER TABLE "serverData" ALTER COLUMN "greatPlainsPlantCode" SET NOT NULL;--> statement-breakpoint
ALTER TABLE "serverData" ALTER COLUMN "zipcode" SET DATA TYPE integer;--> statement-breakpoint
ALTER TABLE "serverData" ALTER COLUMN "lstServerPort" SET DATA TYPE integer;--> statement-breakpoint
ALTER TABLE "serverData" ALTER COLUMN "lstServerPort" SET NOT NULL;--> statement-breakpoint
ALTER TABLE "serverData" ALTER COLUMN "serverLoc" SET NOT NULL;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -71,6 +71,20 @@
"when": 1758891252758,
"tag": "0009_cultured_slayback",
"breakpoints": true
},
{
"idx": 10,
"version": "7",
"when": 1759193957132,
"tag": "0010_watery_gravity",
"breakpoints": true
},
{
"idx": 11,
"version": "7",
"when": 1759195276875,
"tag": "0011_careless_banshee",
"breakpoints": true
}
]
}

132
package-lock.json generated
View File

@@ -12,6 +12,7 @@
"@dotenvx/dotenvx": "^1.49.0",
"@tanstack/react-table": "^8.21.3",
"@types/cors": "^2.8.19",
"axios": "^1.12.2",
"better-auth": "^1.3.9",
"cors": "^2.8.5",
"date-fns": "^4.1.0",
@@ -3438,6 +3439,12 @@
"node": ">=12.0.0"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
"license": "MIT"
},
"node_modules/at-least-node": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz",
@@ -3457,6 +3464,17 @@
"node": ">=8.0.0"
}
},
"node_modules/axios": {
"version": "1.12.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.4",
"proxy-from-env": "^1.1.0"
}
},
"node_modules/balanced-match": {
"version": "1.0.2",
"dev": true,
@@ -3892,6 +3910,18 @@
"integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==",
"license": "MIT"
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"license": "MIT",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/commander": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz",
@@ -4760,6 +4790,15 @@
"integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==",
"license": "MIT"
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"license": "MIT",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/depd": {
"version": "2.0.0",
"license": "MIT",
@@ -5184,6 +5223,21 @@
"node": ">= 0.4"
}
},
"node_modules/es-set-tostringtag": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.6",
"has-tostringtag": "^1.0.2",
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/esbuild": {
"version": "0.25.9",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.9.tgz",
@@ -5604,6 +5658,26 @@
"node": ">= 8"
}
},
"node_modules/follow-redirects": {
"version": "1.15.11",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
"integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"license": "MIT",
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/foreground-child": {
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
@@ -5634,6 +5708,43 @@
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/form-data": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/form-data/node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/form-data/node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"license": "MIT",
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/forwarded": {
"version": "0.2.0",
"license": "MIT",
@@ -6072,6 +6183,21 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-tostringtag": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
"license": "MIT",
"dependencies": {
"has-symbols": "^1.0.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/hasown": {
"version": "2.0.2",
"license": "MIT",
@@ -8294,6 +8420,12 @@
"node": ">= 0.10"
}
},
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
"license": "MIT"
},
"node_modules/pump": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz",

View File

@@ -44,6 +44,7 @@
"@dotenvx/dotenvx": "^1.49.0",
"@tanstack/react-table": "^8.21.3",
"@types/cors": "^2.8.19",
"axios": "^1.12.2",
"better-auth": "^1.3.9",
"cors": "^2.8.5",
"date-fns": "^4.1.0",