Compare commits

...

166 Commits

Author SHA1 Message Date
8943407f27 feat(finaly): the final push before moving all to the new lst 2025-09-19 22:16:47 -05:00
0bbe411db0 feat(docs): added in link to the main docs and to the material xfer 2025-09-19 07:56:43 -05:00
99b3ad633c fix(materials): changes for consuming peices vs units 2025-09-18 11:00:54 -05:00
c892348d19 refactor(manual print): added a min length of 10 characters 2025-09-18 11:00:18 -05:00
360549aaf4 fix(consume material): changes made to remove the rest of the auth needs 2025-09-18 10:59:55 -05:00
bdc1e72fc1 refactor(frontend): comment required now when doing manual labels 2025-09-17 20:02:14 -05:00
4cae96b1ee refactor(detach): just a log to monitor better 2025-09-11 06:42:31 -05:00
1cde8ab2e6 fix(mm query): more changes to the material query to please more plants 2025-09-11 06:42:02 -05:00
03e8378213 fix(inhouse): changes to no longer error 2025-09-11 06:39:36 -05:00
275c93dc79 refactor(detach silo): changes to now show the error in the console in the browser 2025-09-11 06:39:07 -05:00
fbb8c21d5c feat(loreal forecast): added in an email to be sent when we are missing skus 2025-09-11 06:38:36 -05:00
dce93d3de2 ci(release): bump build number to 652 2025-09-11 06:32:08 -05:00
faf4e9f9ab ci(release): bump build number to 651 2025-09-11 06:30:48 -05:00
a9783a7d35 ci(release): bump build number to 650 2025-09-10 11:15:07 -05:00
e9ca6dbbb2 ci(release): bump build number to 649 2025-09-10 11:13:59 -05:00
e996f99400 ci(release): bump build number to 648 2025-09-10 06:59:19 -05:00
3b939ff2d3 ci(release): bump build number to 647 2025-09-09 21:21:08 -05:00
a7ff88025e chore(release): 2.27.0 2025-09-09 21:20:39 -05:00
00899a5b77 fix(materials): more fixes to try and please all plants to use this version and not call me 2025-09-09 21:20:23 -05:00
99ecf52218 fix(label query): fixes to only pull in active layouts 2025-09-09 21:19:39 -05:00
a6920d7cd8 ci(release): bump build number to 645 2025-09-09 17:52:15 -05:00
788efdf4b3 ci(release): bump build number to 644 2025-09-09 17:40:05 -05:00
a2a17beeeb ci(release): bump build number to 643 2025-09-09 17:28:29 -05:00
6c793c66a4 ci(release): bump build number to 642 2025-09-09 15:57:41 -05:00
6918a5a7d4 ci(release): bump build number to 641 2025-09-09 09:12:18 -05:00
ddd9b0e372 ci(release): bump build number to 640 2025-09-09 07:48:24 -05:00
41d921756b ci(release): bump build number to 638 2025-09-08 19:16:44 -05:00
d86ca7d3bf ci(release): bump build number to 636 2025-09-08 13:30:05 -05:00
932a72ba88 fix(db): changes to the user so it deletes correctly 2025-09-07 15:38:27 -05:00
b9c6d0ba57 ci(release): bump build number to 622 2025-09-06 16:33:56 -05:00
592e4c8cd7 ci(release): bump build number to 621 2025-09-06 16:02:36 -05:00
b772666905 ci(release): bump build number to 620 2025-09-06 14:56:05 -05:00
a6edcb71bf ci(release): bump build number to 619 2025-09-06 14:51:26 -05:00
76d929abea ci(release): bump build number to 618 2025-09-06 14:43:44 -05:00
da17defe82 ci(release): bump build number to 617 2025-09-06 14:37:44 -05:00
669f5ca33f ci(release): bump build number to 616 2025-09-06 14:31:51 -05:00
b649dca4c9 ci(release): bump build number to 615 2025-09-06 14:28:44 -05:00
04b8d041f9 ci(release): bump build number to 611 2025-09-06 09:24:09 -05:00
71bc7c29bc ci(release): bump build number to 610 2025-09-06 09:22:13 -05:00
79f1f8f91b fix(article check): corrected the query to not have a specfic plant in it 2025-09-06 09:04:00 -05:00
b677bc1498 refactor(rfid): changes to show all tags vs only 3 if there are more 2025-09-06 09:03:35 -05:00
a53915ad8c fix(material check): split manual material out of the mm to properly catch it 2025-09-06 09:03:11 -05:00
b3ce767b32 fix(bookin): corrections to only show the message on error vs the json 2025-09-06 09:02:42 -05:00
6caa5984e7 fix(eomservice): changes to stop a crash incase the sql returned nothing due to start up 2025-09-06 09:02:09 -05:00
415d2e4a1d fix(register): changes to not give everyone system admin 2025-09-06 09:01:42 -05:00
271cdbdbfa refactor(cosume): changes to allow non logged in users to use this function 2025-09-06 09:01:12 -05:00
796a8dccd2 refactor(silo card): changes to allow viewers to see and able to attach and detach 2025-09-06 09:00:46 -05:00
e03e92c18d ci(build): changes to build then copy to new version being rewritten 2025-09-06 09:00:11 -05:00
dd2e5d04ae ci(release): bump build number to 609 2025-09-06 08:54:39 -05:00
708b57b926 ci(release): bump build number to 608 2025-09-06 08:50:28 -05:00
0444da8bbc ci(release): bump build number to 607 2025-09-06 08:34:18 -05:00
6d49fae16d ci(release): bump build number to 602 2025-09-05 09:55:52 -05:00
be8a44f2dc ci(release): bump build number to 601 2025-09-05 09:52:25 -05:00
a3994a1f69 ci(release): bump build number to 600 2025-09-05 09:45:02 -05:00
00eaf62d4c ci(release): bump build number to 600 2025-09-05 09:44:58 -05:00
6851777faf ci(release): bump build number to 599 2025-09-05 09:43:27 -05:00
f9e97fc224 ci(release): bump build number to 598 2025-09-05 09:43:25 -05:00
4e3c5e4191 ci(release): bump build number to 595 2025-09-05 08:28:25 -05:00
172fd1d5c2 ci(release): bump build number to 594 2025-09-05 08:28:22 -05:00
1094de2ebd ci(release): bump build number to 593 2025-09-05 08:23:59 -05:00
7462b3d90b ci(release): bump build number to 592 2025-09-05 08:23:56 -05:00
2045c13ef2 ci(release): bump build number to 591 2025-09-05 08:22:41 -05:00
5fa9e83d5b ci(release): bump build number to 590 2025-09-05 08:22:39 -05:00
f48c944bd0 ci(release): bump build number to 589 2025-09-05 07:50:36 -05:00
dddd1d422e ci(release): bump build number to 588 2025-09-04 13:12:21 -05:00
8133443ec9 ci(release): bump build number to 587 2025-09-03 10:56:13 -05:00
28c7d30c1a ci(release): bump build number to 586 2025-09-03 10:54:37 -05:00
57966ac9de refactor(mm query): changes to the query to see pkg and materials properly and not duplicates 2025-09-01 11:08:47 -05:00
6910550de7 fix(main material check): corrections to properly ignore pkg during color checks 2025-09-01 11:08:16 -05:00
1f8b8a7248 fix(bookin): corrected the bookin in error response
this could still print a label if the system detects the material properly but some other issue with
alplaprod happened
2025-09-01 11:05:36 -05:00
cfed981928 refactor(sql): articles added in UOM 2025-09-01 11:02:32 -05:00
0efe74d4b1 ci(release): bump build number to 585 2025-09-01 07:31:05 -05:00
c35db2e209 ci(release): bump build number to 584 2025-08-29 19:01:12 -05:00
e1cdeb740b ci(release): bump build number to 583 2025-08-29 18:57:25 -05:00
3c9e75dc3c ci(release): bump build number to 582 2025-08-29 18:32:34 -05:00
64b5b0d248 ci(release): bump build number to 581 2025-08-29 11:37:47 -05:00
668eae9719 ci(release): bump build number to 580 2025-08-29 11:31:22 -05:00
7cc3778506 feat(eom): lastSales, lastPurch added to be pulled with new template 2025-08-29 11:26:26 -05:00
af47c1582e ci(release): bump build number to 579 2025-08-29 08:59:50 -05:00
081572c421 ci(release): bump build number to 578 2025-08-28 11:09:44 -05:00
c6f6ef6262 fix(frontend): typos 2025-08-28 10:56:49 -05:00
34849e15d1 ci(release): bump build number to 577 2025-08-28 10:19:10 -05:00
9bd66942f5 chore(release): 2.26.0 2025-08-28 10:18:13 -05:00
795da35141 ci(release): bump build number to 576 2025-08-28 10:17:15 -05:00
468f933168 fix(eom stats): corrections to the eom inv stuff to be proper tiems 2025-08-28 10:15:41 -05:00
8c296c5b78 test(forklifts): forklift starting process 2025-08-28 10:15:13 -05:00
53ed2c4e6a feat(materials): added in a bigger window on eom transfer lots 2025-08-28 10:14:41 -05:00
0a6ddea8c0 refactor(materials): changes to allow exact and eom transfers 2025-08-27 17:17:32 -05:00
df423192bf refactor(labeling): moved bookin fails inside bookin as it could be off 2025-08-27 17:17:03 -05:00
2f908398bc ci(release): bump build number to 575 2025-08-27 16:08:58 -05:00
bf8203bbee ci(release): bump build number to 574 2025-08-27 15:50:49 -05:00
e92eccf785 ci(release): bump build number to 573 2025-08-27 14:43:04 -05:00
f4f3de49ca fix(material check): alt mm causing issues and utilizing an 80% to just be ok 2025-08-25 18:34:09 -05:00
788d6367a3 refactor(labeling): removed the wrong import 2025-08-25 15:47:18 -05:00
24a97afe06 fix(fake edi): removed console log 2025-08-25 14:40:21 -05:00
37f82a9710 refactor(tms intergration): corrected how we added gl coding 2025-08-25 14:39:54 -05:00
369d16018c fix(bookin): corrected the error received from the book in fail 2025-08-25 14:38:00 -05:00
68901a857a fix(dm): corrected the remark section so its properly sent over 2025-08-25 14:09:13 -05:00
171763184c ci(release): bump build number to 572 2025-08-25 13:58:47 -05:00
b7de2a8dbe ci(release): bump build number to 571 2025-08-25 13:15:06 -05:00
e78083f496 ci(release): bump build number to 570 2025-08-25 12:18:17 -05:00
3f3b64bf2b ci(release): bump build number to 569 2025-08-25 12:15:27 -05:00
123bb127e8 ci(release): bump build number to 568 2025-08-21 07:33:43 -05:00
8d6ead3aa1 fix(produser): changes to include DM 2025-08-21 05:55:41 -05:00
3148aa79d7 refactor(materials): changes for permissions on material consume 2025-08-21 05:55:20 -05:00
4486fe2436 fix(ocp): zechetti type correction to include the printer name 2025-08-21 05:54:48 -05:00
0ba338d480 feat(rfid): new check to remove tags that have been at a line longer than 6 hours 2025-08-21 05:18:49 -05:00
846ac479b1 fix(transferlots): missed adding this 2025-08-21 05:14:03 -05:00
73d38ba3fe refactor(notifcations): changed hour to min in ti intergrations 2025-08-21 05:13:34 -05:00
27586e923a feat(ocp): zechetti 1 added in 2025-08-21 05:13:06 -05:00
662a951b98 feat(tms): a clean up function was added to remove releases added as blockers older than 45d 2025-08-21 05:10:25 -05:00
0c54cecbd4 ci(release): bump build number to 567 2025-08-20 20:51:22 -05:00
bcf5378966 ci(release): bump build number to 566 2025-08-20 20:04:51 -05:00
c5668e6cf1 chore(release): 2.25.0 2025-08-20 20:04:05 -05:00
213814b868 ci(release): bump build number to 565 2025-08-20 19:02:42 -05:00
7d5b0c46c1 ci(release): bump build number to 564 2025-08-19 17:37:40 -05:00
595e22e8e9 ci(release): bump build number to 563 2025-08-19 17:04:13 -05:00
a6f18554b8 ci(release): bump build number to 562 2025-08-19 17:00:13 -05:00
88f61c8eaa feat(ocp): materials contorls and transfer to next lot logic 2025-08-19 16:00:49 -05:00
a183279268 ci(release): bump build number to 561 2025-08-17 18:10:25 -05:00
5156c8bf7b ci(release): bump build number to 560 2025-08-14 09:32:25 -05:00
4669ff95dc ci(release): bump build number to 559 2025-08-13 22:52:44 -05:00
ed93992165 ci(release): bump build number to 558 2025-08-13 16:32:23 -05:00
f16e2bf53b ci(release): bump build number to 557 2025-08-13 15:50:35 -05:00
a84998438c ci(release): bump build number to 556 2025-08-13 11:33:45 -05:00
83469105f0 ci(release): bump build number to 555 2025-08-12 17:53:08 -05:00
835ae58f04 ci(release): bump build number to 554 2025-08-12 16:26:39 -05:00
1498a19121 ci(release): bump build number to 553 2025-08-12 14:28:42 -05:00
ca96849991 ci(release): bump build number to 552 2025-08-12 13:55:49 -05:00
27b37f9849 ci(release): bump build number to 551 2025-08-12 11:24:50 -05:00
7f81a2e09a ci(release): bump build number to 550 2025-08-11 12:24:30 -05:00
e14abd3644 ci(release): bump build number to 549 2025-08-11 08:22:44 -05:00
2ff7b9baf9 refactor(migrations): not needed but we have it and needed to correct the settings 2025-08-10 18:15:07 -05:00
8145dc800d refactor(siloadjustment): refactored to get the settings from the state vs direct from db 2025-08-10 18:14:38 -05:00
6ccf500e5e feat(prodrole): added in planner role 2025-08-10 18:14:08 -05:00
103171c924 refactor(notifications): refactored the cron job system so we can utilize outside the service 2025-08-10 18:13:32 -05:00
2eb6fa7794 fix(gotransport): error handling so we dont get spammed with errors 2025-08-10 18:12:52 -05:00
397f1da595 fix(inv query): error in improper placed , in the query 2025-08-10 18:12:29 -05:00
8d63f7f6b0 feat(psi): psi querys added and av grab right now 2025-08-10 18:11:57 -05:00
52345bc94c feat(eom): added in hostorical data and deletion for data over 45 days 2025-08-10 18:11:16 -05:00
a8a1c1d7fb ci(release): bump build number to 548 2025-08-09 15:05:59 -05:00
83ff2641f3 ci(release): bump build number to 547 2025-08-09 15:02:21 -05:00
7c48f608bc ci(release): bump build number to 546 2025-08-09 14:58:02 -05:00
1802b9ba4e ci(release): bump build number to 545 2025-08-09 14:41:23 -05:00
67a12ccc5c ci(release): bump build number to 544 2025-08-09 14:28:17 -05:00
15e2a65cbb ci(release): bump build number to 543 2025-08-07 21:18:09 -05:00
9e5577e6bb feat(dm): changes to have a default time if nothing is passed in the excel 2025-08-06 15:25:45 -05:00
c52e2a8671 ci(release): bump build number to 542 2025-08-06 15:19:54 -05:00
8f76d6998c ci(release): bump build number to 541 2025-08-06 15:08:13 -05:00
e209686d3c ci(release): bump build number to 540 2025-08-05 14:50:05 -05:00
803c963f96 fix(https fixes): made it so the settings can be grabbed via https 2025-08-05 14:45:04 -05:00
078f35626b ci(release): bump build number to 539 2025-08-05 14:16:26 -05:00
2288884829 fix(inv cards): correction to properly display the names 2025-08-04 18:12:32 -05:00
69fc7418c9 ci(release): bump build number to 538 2025-08-04 14:41:53 -05:00
a0179a41ba refactor(migration progress): moved to start looking at the go backedn 2025-08-04 12:48:49 -05:00
a36552fd9b fix(dm page): correction to the insturcitons 2025-08-04 12:48:04 -05:00
c7bb12822b fix(dyco): correction to disable the camera if ocme is off 2025-08-04 12:44:03 -05:00
98a5ca7bb8 ci(release): bump build number to 537 2025-08-04 09:48:20 -05:00
74ac2864c9 ci(release): bump build number to 536 2025-08-04 07:56:50 -05:00
1f93eca561 ci(release): bump build number to 535 2025-08-04 06:54:23 -05:00
1f44750346 ci(release): bump build number to 534 2025-07-31 11:58:56 -05:00
0d17fef1a1 fix(mainmaterial check): if the machine dose not require mm to be staged properly ignore 2025-07-31 11:54:40 -05:00
f574645b44 ci(release): bump build number to 533 2025-07-30 19:37:13 -05:00
998e84f564 feat(migration start): this starts the migration of all settings to look at the go backend vs this 2025-07-30 14:34:15 -05:00
3e51ebc18b ci(release): bump build number to 532 2025-07-29 21:11:53 -05:00
8129dbb787 ci(release): bump build number to 531 2025-07-29 20:51:27 -05:00
114 changed files with 10863 additions and 621 deletions

1
.gitignore vendored
View File

@@ -157,3 +157,4 @@ backend-0.1.3.zip
BulkForecastTemplate
BulkOrdersTemplate
check.json
server/services/ocp/controller/materials/materialcheck.bak

8
.includes Normal file
View File

@@ -0,0 +1,8 @@
database
dist
frontend/dist
CHANGELOG.md
drizzle.config.ts
package.json
package-lock.json
README.md

View File

@@ -1,5 +1,104 @@
# All Changes to LST can be found below.
## [2.27.0](https://git.tuffraid.net/cowch/lstV2/compare/v2.26.0...v2.27.0) (2025-09-10)
### 🌟 Enhancements
* **eom:** lastSales, lastPurch added to be pulled with new template ([7cc3778](https://git.tuffraid.net/cowch/lstV2/commits/7cc3778506fc92392ca8431aee0edb203861e10d))
### 🛠️ Code Refactor
* **cosume:** changes to allow non logged in users to use this function ([271cdbd](https://git.tuffraid.net/cowch/lstV2/commits/271cdbdbfa2478ecf56e9b01a4474508acacda2e))
* **mm query:** changes to the query to see pkg and materials properly and not duplicates ([57966ac](https://git.tuffraid.net/cowch/lstV2/commits/57966ac9de72543014451b7ccd75539296ccaa51))
* **rfid:** changes to show all tags vs only 3 if there are more ([b677bc1](https://git.tuffraid.net/cowch/lstV2/commits/b677bc14981faff30c91f6ffe4602319dd3c6016))
* **silo card:** changes to allow viewers to see and able to attach and detach ([796a8dc](https://git.tuffraid.net/cowch/lstV2/commits/796a8dccd2807890abdff7c8dacf8b2246eb265e))
* **sql:** articles added in UOM ([cfed981](https://git.tuffraid.net/cowch/lstV2/commits/cfed981928a56389e09ef428c43ceabc1caec28e))
### 🐛 Bug fixes
* **article check:** corrected the query to not have a specfic plant in it ([79f1f8f](https://git.tuffraid.net/cowch/lstV2/commits/79f1f8f91ba33533de7f4a7cc91503cfd8dd4ce5))
* **bookin:** corrected the bookin in error response ([1f8b8a7](https://git.tuffraid.net/cowch/lstV2/commits/1f8b8a7248c11c7e264c8c5ae7c042c5a0878c46))
* **bookin:** corrections to only show the message on error vs the json ([b3ce767](https://git.tuffraid.net/cowch/lstV2/commits/b3ce767b323c990c0ccf35ad6c2c67136a27272e))
* **db:** changes to the user so it deletes correctly ([932a72b](https://git.tuffraid.net/cowch/lstV2/commits/932a72ba884673471f0056e721cc3f2c8e34b4f3))
* **eomservice:** changes to stop a crash incase the sql returned nothing due to start up ([6caa598](https://git.tuffraid.net/cowch/lstV2/commits/6caa5984e7a3e7b48b119c176835663ffec71151))
* **frontend:** typos ([c6f6ef6](https://git.tuffraid.net/cowch/lstV2/commits/c6f6ef626295f452cdf26f6776b74cfb3b1a10f5))
* **label query:** fixes to only pull in active layouts ([99ecf52](https://git.tuffraid.net/cowch/lstV2/commits/99ecf52218556e048ba9262e74f9b3d020dea31d))
* **main material check:** corrections to properly ignore pkg during color checks ([6910550](https://git.tuffraid.net/cowch/lstV2/commits/6910550de769dce04b1045f96ab19cf7b8d1ef8c))
* **material check:** split manual material out of the mm to properly catch it ([a53915a](https://git.tuffraid.net/cowch/lstV2/commits/a53915ad8cbec5bd8d6ba4643c460ad0162249e2))
* **materials:** more fixes to try and please all plants to use this version and not call me ([00899a5](https://git.tuffraid.net/cowch/lstV2/commits/00899a5b778eab792b350a0b47589de1524d91c8))
* **register:** changes to not give everyone system admin ([415d2e4](https://git.tuffraid.net/cowch/lstV2/commits/415d2e4a1d851cc46ac64ffc814a280a02293bbc))
## [2.26.0](https://git.tuffraid.net/cowch/lstV2/compare/v2.25.0...v2.26.0) (2025-08-28)
### 🛠️ Code Refactor
* **labeling:** moved bookin fails inside bookin as it could be off ([df42319](https://git.tuffraid.net/cowch/lstV2/commits/df423192bfc5e2389872147e92b7d22e648a2927))
* **labeling:** removed the wrong import ([788d636](https://git.tuffraid.net/cowch/lstV2/commits/788d6367a380a81722a76bd6adbe18831d88eeb5))
* **materials:** changes for permissions on material consume ([3148aa7](https://git.tuffraid.net/cowch/lstV2/commits/3148aa79d73823499824cc0601fcabec97bb4f9d))
* **materials:** changes to allow exact and eom transfers ([0a6ddea](https://git.tuffraid.net/cowch/lstV2/commits/0a6ddea8c0d2773aba00266df7e2839879d10cb1))
* **notifcations:** changed hour to min in ti intergrations ([73d38ba](https://git.tuffraid.net/cowch/lstV2/commits/73d38ba3fe7ab4d5ca3a4a1e1c99fdd7bf5e92dc))
* **tms intergration:** corrected how we added gl coding ([37f82a9](https://git.tuffraid.net/cowch/lstV2/commits/37f82a9710cf20e6d0d056893c9da43c70b9e619))
### 🌟 Enhancements
* **materials:** added in a bigger window on eom transfer lots ([53ed2c4](https://git.tuffraid.net/cowch/lstV2/commits/53ed2c4e6a0d3fafafbcb655b5d06cff6324363d))
* **ocp:** zechetti 1 added in ([27586e9](https://git.tuffraid.net/cowch/lstV2/commits/27586e923a106f2b8dd804e9c8292edd5d009cf0))
* **rfid:** new check to remove tags that have been at a line longer than 6 hours ([0ba338d](https://git.tuffraid.net/cowch/lstV2/commits/0ba338d48037f7def74196ca3f41de5807e2cb31))
* **tms:** a clean up function was added to remove releases added as blockers older than 45d ([662a951](https://git.tuffraid.net/cowch/lstV2/commits/662a951b9871d5dfc21f01a76ba23be77b475757))
### 📝 Testing Code
* **forklifts:** forklift starting process ([8c296c5](https://git.tuffraid.net/cowch/lstV2/commits/8c296c5b783f42f85c30a955d6e9d540c50dcfbe))
### 🐛 Bug fixes
* **bookin:** corrected the error received from the book in fail ([369d160](https://git.tuffraid.net/cowch/lstV2/commits/369d16018c8be9fdb936afbc3749b3f9fcea58c8))
* **dm:** corrected the remark section so its properly sent over ([68901a8](https://git.tuffraid.net/cowch/lstV2/commits/68901a857ae5161f024da9cdefc0a981a4907205))
* **eom stats:** corrections to the eom inv stuff to be proper tiems ([468f933](https://git.tuffraid.net/cowch/lstV2/commits/468f933168cc40be0c1b159c31948a2c97600390))
* **fake edi:** removed console log ([24a97af](https://git.tuffraid.net/cowch/lstV2/commits/24a97afe064d5c44872e24e6e3299ed5f2977a78))
* **material check:** alt mm causing issues and utilizing an 80% to just be ok ([f4f3de4](https://git.tuffraid.net/cowch/lstV2/commits/f4f3de49cae277ffa158116d1802495b27fa8e75))
* **ocp:** zechetti type correction to include the printer name ([4486fe2](https://git.tuffraid.net/cowch/lstV2/commits/4486fe24362b4811d7cff0467c7f2f85e0c9e3c4))
* **produser:** changes to include DM ([8d6ead3](https://git.tuffraid.net/cowch/lstV2/commits/8d6ead3aa11aed95ffa3537fbe72cfa9bbceb380))
* **transferlots:** missed adding this ([846ac47](https://git.tuffraid.net/cowch/lstV2/commits/846ac479b1bf211a7891ae362525ea14580ff0cc))
## [2.25.0](https://git.tuffraid.net/cowch/lstV2/compare/v2.24.1...v2.25.0) (2025-08-21)
### 🐛 Bug fixes
* **dm page:** correction to the insturcitons ([a36552f](https://git.tuffraid.net/cowch/lstV2/commits/a36552fd9b9c77f8ecee8b36f45e613383841f95))
* **dyco:** correction to disable the camera if ocme is off ([c7bb128](https://git.tuffraid.net/cowch/lstV2/commits/c7bb12822b13c0c1c929d2c8a9ab150cd0feeff2))
* **gotransport:** error handling so we dont get spammed with errors ([2eb6fa7](https://git.tuffraid.net/cowch/lstV2/commits/2eb6fa77946d5f8572ffc5baa47da602482bce2b))
* **https fixes:** made it so the settings can be grabbed via https ([803c963](https://git.tuffraid.net/cowch/lstV2/commits/803c963f964f26095b2aa6a7d0a60e03615d4c17))
* **inv cards:** correction to properly display the names ([2288884](https://git.tuffraid.net/cowch/lstV2/commits/22888848291aa3df4ebbdd224656731fbb305fa7))
* **inv query:** error in improper placed , in the query ([397f1da](https://git.tuffraid.net/cowch/lstV2/commits/397f1da595fd8a1e1c2a630a3650eb8715604c82))
* **mainmaterial check:** if the machine dose not require mm to be staged properly ignore ([0d17fef](https://git.tuffraid.net/cowch/lstV2/commits/0d17fef1a1c75dd0f27988fd2e3527b508b915cb))
### 🛠️ Code Refactor
* **migration progress:** moved to start looking at the go backedn ([a0179a4](https://git.tuffraid.net/cowch/lstV2/commits/a0179a41bac93d2a7320802e9d70aa966ed79ae4))
* **migrations:** not needed but we have it and needed to correct the settings ([2ff7b9b](https://git.tuffraid.net/cowch/lstV2/commits/2ff7b9baf9ca288f8a33bec3ab1a2ba331ace6b9))
* **notifications:** refactored the cron job system so we can utilize outside the service ([103171c](https://git.tuffraid.net/cowch/lstV2/commits/103171c924a9de78b0a7600abb455fdd6f4bfea1))
* **siloadjustment:** refactored to get the settings from the state vs direct from db ([8145dc8](https://git.tuffraid.net/cowch/lstV2/commits/8145dc800dced31860a926c80eca72cb39433b29))
### 🌟 Enhancements
* **dm:** changes to have a default time if nothing is passed in the excel ([9e5577e](https://git.tuffraid.net/cowch/lstV2/commits/9e5577e6bb4ff3b6c4004288e177fbab322a4b44))
* **eom:** added in hostorical data and deletion for data over 45 days ([52345bc](https://git.tuffraid.net/cowch/lstV2/commits/52345bc94c9e8abc82150fb371a9ba0d0757f16a))
* **migration start:** this starts the migration of all settings to look at the go backend vs this ([998e84f](https://git.tuffraid.net/cowch/lstV2/commits/998e84f5648148c9a94df7177a3d311e16bf4614))
* **ocp:** materials contorls and transfer to next lot logic ([88f61c8](https://git.tuffraid.net/cowch/lstV2/commits/88f61c8eaa32a581094b04b5e18c654040dbeb92))
* **prodrole:** added in planner role ([6ccf500](https://git.tuffraid.net/cowch/lstV2/commits/6ccf500e5eb82125f7bcd3d764282a4b8a750be9))
* **psi:** psi querys added and av grab right now ([8d63f7f](https://git.tuffraid.net/cowch/lstV2/commits/8d63f7f6b0981d604b628ff2f710b8eb41d32837))
### [2.24.1](https://git.tuffraid.net/cowch/lstV2/compare/v2.24.0...v2.24.1) (2025-07-25)

View File

@@ -1,3 +1,7 @@
# THIS VERSION IS NO LONGER BEING UPDATED PLEASE GO TO THE NEW REPO LINK BELOW
[NEW LST REPO](https://git.tuffraid.net/cowch/lst)
# lstV2
Logistics Support Tool V2

View File

@@ -0,0 +1,10 @@
-- Migration: create the "labelRatio" counter table, add a lot_number column to
-- invHistoricalData, and enforce unique label-counter names.
CREATE TABLE "labelRatio" (
    -- fixed: original quoted identifier was " ratio_id" (note the leading
    -- space), which PostgreSQL would take literally and create a column whose
    -- name begins with a space — breaking every later reference to ratio_id
    "ratio_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "name" text DEFAULT 'labels',
    "autoLabel" integer DEFAULT 0,      -- running count of auto-printed labels
    "manualLabel" integer DEFAULT 0,    -- running count of manually-printed labels
    "lastReset" timestamp DEFAULT now() -- when the counters were last zeroed
);
--> statement-breakpoint
ALTER TABLE "invHistoricalData" ADD COLUMN "lot_number" text;--> statement-breakpoint
-- one counter row per label name
CREATE UNIQUE INDEX "labelname" ON "labelRatio" USING btree ("name");

View File

@@ -0,0 +1,2 @@
-- Migration: give the audit columns on invHistoricalData usable defaults so
-- rows inserted by background jobs need not supply them explicitly.
-- Rows written without an explicit updater are attributed to the 'lst' service.
ALTER TABLE "invHistoricalData" ALTER COLUMN "upd_user" SET DEFAULT 'lst';--> statement-breakpoint
-- Default the update timestamp to the time of insert.
ALTER TABLE "invHistoricalData" ALTER COLUMN "upd_date" SET DEFAULT now();

View File

@@ -0,0 +1,3 @@
-- Migration: recreate the userRoles -> users foreign key with ON DELETE CASCADE
-- so deleting a user also removes that user's role assignments (previously the
-- plain FK blocked user deletion while role rows existed).
ALTER TABLE "userRoles" DROP CONSTRAINT "userRoles_user_id_users_user_id_fk";
--> statement-breakpoint
ALTER TABLE "userRoles" ADD CONSTRAINT "userRoles_user_id_users_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("user_id") ON DELETE cascade ON UPDATE no action;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -491,6 +491,27 @@
"when": 1752195894698,
"tag": "0069_chemical_maximus",
"breakpoints": true
},
{
"idx": 70,
"version": "7",
"when": 1754767718941,
"tag": "0070_brief_mephisto",
"breakpoints": true
},
{
"idx": 71,
"version": "7",
"when": 1754768521841,
"tag": "0071_fantastic_old_lace",
"breakpoints": true
},
{
"idx": 72,
"version": "7",
"when": 1757167736042,
"tag": "0072_round_black_knight",
"breakpoints": true
}
]
}

View File

View File

View File

View File

@@ -12,7 +12,7 @@ export const invHistoricalData = pgTable(
"invHistoricalData",
{
inv_id: uuid("inv_id").defaultRandom().primaryKey(),
histDate: date("histDate").notNull(), // what month are we running in should just be the first of current month
histDate: date("histDate").notNull(), // this date should always be yesterday when we post it.
plantToken: text("plantToken"),
article: text("article").notNull(),
articleDescription: text("articleDescription").notNull(),
@@ -21,10 +21,11 @@ export const invHistoricalData = pgTable(
avaliable_QTY: text("avaliable_QTY"),
coa_QTY: text("coa_QTY"),
held_QTY: text("held_QTY"),
lot_Number: text("lot_number"),
consignment: text("consignment"),
location: text("location"),
upd_user: text("upd_user"),
upd_date: timestamp("upd_date"),
upd_user: text("upd_user").default("lst"),
upd_date: timestamp("upd_date").defaultNow(),
}
// (table) => [
// // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),

View File

@@ -27,7 +27,7 @@ export const userRoles = pgTable(
{
user_id: uuid("user_id")
.notNull()
.references(() => users.user_id),
.references(() => users.user_id, { onDelete: "cascade" }),
role_id: uuid("role_id")
.notNull()
.references(() => roles.role_id),

View File

@@ -21,6 +21,7 @@
"@radix-ui/react-select": "^2.2.5",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slot": "^1.2.3",
"@radix-ui/react-switch": "^1.2.6",
"@radix-ui/react-tabs": "^1.1.12",
"@radix-ui/react-tooltip": "^1.2.7",
"@react-pdf/renderer": "^4.3.0",
@@ -2072,6 +2073,41 @@
}
}
},
"node_modules/@radix-ui/react-switch": {
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/@radix-ui/react-switch/-/react-switch-1.2.6.tgz",
"integrity": "sha512-bByzr1+ep1zk4VubeEVViV592vu2lHE2BZY5OnzehZqOOgogN80+mNtCqPkhn2gklJqOpxWgPoYTSnhBCqpOXQ==",
"license": "MIT",
"dependencies": {
"@radix-ui/primitive": "1.1.3",
"@radix-ui/react-compose-refs": "1.1.2",
"@radix-ui/react-context": "1.1.2",
"@radix-ui/react-primitive": "2.1.3",
"@radix-ui/react-use-controllable-state": "1.2.2",
"@radix-ui/react-use-previous": "1.1.1",
"@radix-ui/react-use-size": "1.1.1"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-switch/node_modules/@radix-ui/primitive": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz",
"integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==",
"license": "MIT"
},
"node_modules/@radix-ui/react-tabs": {
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/@radix-ui/react-tabs/-/react-tabs-1.1.12.tgz",
@@ -5316,18 +5352,6 @@
"node": ">= 4"
}
},
"node_modules/immer": {
"version": "10.1.1",
"resolved": "https://registry.npmjs.org/immer/-/immer-10.1.1.tgz",
"integrity": "sha512-s2MPrmjovJcoMaHtx6K11Ra7oD05NT97w1IC5zpMkT6Atjr7H8LjaDd81iIxUYpMKSRRNMJE703M1Fhr/TctHw==",
"license": "MIT",
"optional": true,
"peer": true,
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/immer"
}
},
"node_modules/import-fresh": {
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz",

View File

@@ -25,6 +25,7 @@
"@radix-ui/react-select": "^2.2.5",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slot": "^1.2.3",
"@radix-ui/react-switch": "^1.2.6",
"@radix-ui/react-tabs": "^1.1.12",
"@radix-ui/react-tooltip": "^1.2.7",
"@react-pdf/renderer": "^4.3.0",

View File

@@ -1,131 +1,9 @@
import {useSessionStore} from "@/lib/store/sessionStore";
import {LstCard} from "../extendedUI/LstCard";
import {Tabs, TabsContent, TabsList, TabsTrigger} from "../ui/tabs";
import {useModuleStore} from "@/lib/store/useModuleStore";
import {Table, TableBody, TableCell, TableHead, TableHeader, TableRow} from "../ui/table";
import {Skeleton} from "../ui/skeleton";
import {Link, useRouter} from "@tanstack/react-router";
import {Popover, PopoverContent, PopoverTrigger} from "../ui/popover";
import {Button} from "../ui/button";
import {cn} from "@/lib/utils";
import {CalendarIcon} from "lucide-react";
import {format, startOfMonth} from "date-fns";
import {Calendar} from "../ui/calendar";
import {useState} from "react";
import {toast} from "sonner";
import KFP from "./KFP";
import MaterialCheck from "./materialCheck/MaterialCheck";
/**
 * EOM (end-of-month) landing page: a date picker plus a role-gated set of tabs.
 *
 * Fixes over the previous version:
 * - returns null after redirecting an unauthenticated user, so the rest of the
 *   render (which dereferences user/role state) never runs;
 * - guards against a missing "eom" module (previously `eomMod[0].module_id`
 *   would throw);
 * - guards against the user having no role in the eom module (previously
 *   `role[0].role` would throw);
 * - adds React `key` props to the mapped TabsTrigger/TabsContent elements.
 */
export default function EomPage() {
  const {modules} = useModuleStore();
  const {user} = useSessionStore();
  const router = useRouter();
  const [date, setDate] = useState<Date>();

  // Unauthenticated users are bounced to the root route; render nothing while
  // the navigation takes effect.
  if (!user) {
    router.navigate({to: "/"});
    return null;
  }

  const eomMod = modules.filter((m) => m.name === "eom");
  // If the eom module is not registered (still loading, or misconfigured),
  // render nothing rather than crashing on eomMod[0].
  if (eomMod.length === 0) return null;

  // The user's role within the eom module; empty string when none is assigned,
  // which simply yields zero visible tabs.
  const eomRoles: any = user.roles.filter((r) => r.module_id === eomMod[0].module_id);
  const currentRole: string = eomRoles[0]?.role ?? "";

  const tabs = [
    {key: "kfp", label: "Key Figures", roles: ["admin", "systemAdmin"], content: <KFP />},
    {key: "fg", label: "Finished Goods", roles: ["admin", "systemAdmin"], content: <DummyContent />},
    {key: "mm", label: "Main Material", roles: ["admin", "systemAdmin"], content: <DummyContent />},
    {key: "mb", label: "Master Batch", roles: ["admin", "systemAdmin"], content: <DummyContent />},
    {key: "ab", label: "Additive", roles: ["admin", "systemAdmin"], content: <DummyContent />},
    {key: "pp", label: "Purchased Preforms", roles: ["admin", "systemAdmin"], content: <DummyContent />},
    {key: "pre", label: "Preforms", roles: ["admin", "systemAdmin"], content: <DummyContent />},
    {key: "pkg", label: "Packaging", roles: ["admin", "systemAdmin"], content: <DummyContent />},
    {key: "ui", label: "Undefined Items", roles: ["admin"], content: <DummyContent />},
  ];

  // Filter once so the trigger list and content list always agree.
  const visibleTabs = tabs.filter((tab) => tab.roles.includes(currentRole));

  return (
    <div className="m-2 w-screen">
      <div className="mb-2 flex flex-row">
        <Popover>
          <PopoverTrigger asChild>
            <Button
              variant={"outline"}
              className={cn(
                "w-[280px] justify-start text-left font-normal",
                !date && "text-muted-foreground"
              )}
            >
              <CalendarIcon className="mr-2 h-4 w-4" />
              {date ? format(date, "PPP") : <span>Pick a date</span>}
            </Button>
          </PopoverTrigger>
          <PopoverContent className="w-auto p-0">
            <Calendar mode="single" selected={date} onSelect={setDate} initialFocus />
          </PopoverContent>
        </Popover>
        <div className="ml-2">
          <Button onClick={() => toast.success(`Getting data for ${startOfMonth(date!)}-${date}`)}>
            <span className="text-sm">Update Data</span>
          </Button>
        </div>
      </div>
      <Tabs defaultValue="mm">
        <TabsList>
          {visibleTabs.map((tab) => (
            <TabsTrigger key={tab.key} value={tab.key}>
              {tab.label}
            </TabsTrigger>
          ))}
        </TabsList>
        {visibleTabs.map((tab) => (
          <TabsContent key={tab.key} value={tab.key}>
            {tab.content}
          </TabsContent>
        ))}
      </Tabs>
      <MaterialCheck />
    </div>
  );
}
// Placeholder table of skeleton rows shown while the real EOM data views
// are built out; each row links to the matching article page.
function DummyContent() {
  const rows = Array.from({length: 10}, (_, idx) => idx);
  return (
    <LstCard className="w-5/6">
      <Table>
        <TableHeader>
          <TableRow>
            <TableHead>Av</TableHead>
            <TableHead>Description</TableHead>
            <TableHead>Material Type</TableHead>
            <TableHead>Waste</TableHead>
            <TableHead>Loss / Gain $$</TableHead>
          </TableRow>
        </TableHeader>
        <TableBody>
          {rows.map((rowIdx) => (
            <TableRow key={rowIdx}>
              <TableCell className="font-medium m-2">
                <Link to="/article/$av" params={{av: `${rowIdx}`}}>
                  {rowIdx}
                </Link>
              </TableCell>
              <TableCell className="font-medium">
                <Skeleton className="h-4" />
              </TableCell>
              <TableCell>
                <Skeleton className="h-4" />
              </TableCell>
              <TableCell>
                <Skeleton className="h-4" />
              </TableCell>
              <TableCell>
                <Skeleton className="h-4" />
              </TableCell>
            </TableRow>
          ))}
        </TableBody>
      </Table>
    </LstCard>
  );
}

View File

@@ -0,0 +1,3 @@
// Stub component for the upcoming material-check feature; renders only its
// own name so the surrounding page layout can be developed against it.
export default function MaterialCheck() {
  const placeholder = "MaterialCheck";
  return <div>{placeholder}</div>;
}

View File

@@ -0,0 +1,211 @@
import { LstCard } from "@/components/extendedUI/LstCard";
import { Button } from "@/components/ui/button";
import { Calendar } from "@/components/ui/calendar";
import { Popover, PopoverTrigger } from "@/components/ui/popover";
import { Skeleton } from "@/components/ui/skeleton";
import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "@/components/ui/table";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { useSessionStore } from "@/lib/store/sessionStore";
import { useModuleStore } from "@/lib/store/useModuleStore";
import { cn } from "@/lib/utils";
import { PopoverContent } from "@radix-ui/react-popover";
import { Link, useRouter } from "@tanstack/react-router";
import { startOfMonth } from "date-fns";
import { format } from "date-fns-tz";
import { CalendarIcon } from "lucide-react";
import { useState } from "react";
import { toast } from "sonner";
import KFP from "../KFP";
export default function MaterialData() {
const { modules } = useModuleStore();
const { user } = useSessionStore();
const router = useRouter();
const [date, setDate] = useState<Date>();
if (!user) {
router.navigate({ to: "/" });
}
const eomMod = modules.filter((m) => m.name === "eom");
// the users current role for eom is?
const role: any =
user?.roles.filter((r) => r.module_id === eomMod[0].module_id) || "";
const tabs = [
{
key: "kfp",
label: "Key Figures",
roles: ["admin", "systemAdmin"],
content: <KFP />,
},
{
key: "fg",
label: "Finished Goods",
roles: ["admin", "systemAdmin"],
content: <DummyContent />,
},
{
key: "mm",
label: "Main Material",
roles: ["admin", "systemAdmin"],
content: <DummyContent />,
},
{
key: "mb",
label: "Master Batch",
roles: ["admin", "systemAdmin"],
content: <DummyContent />,
},
{
key: "ab",
label: "Additive",
roles: ["admin", "systemAdmin"],
content: <DummyContent />,
},
{
key: "pp",
label: "Purchased Preforms",
roles: ["admin", "systemAdmin"],
content: <DummyContent />,
},
{
key: "pre",
label: "Preforms",
roles: ["admin", "systemAdmin"],
content: <DummyContent />,
},
{
key: "pkg",
label: "Packaging",
roles: ["admin", "systemAdmin"],
content: <DummyContent />,
},
{
key: "ui",
label: "Undefined Items",
roles: ["admin"],
content: <DummyContent />,
},
];
return (
<div className="m-2 w-screen">
<div className="mb-2 flex flex-row">
<Popover>
<PopoverTrigger asChild>
<Button
variant={"outline"}
className={cn(
"w-[280px] justify-start text-left font-normal",
!date && "text-muted-foreground"
)}
>
<CalendarIcon className="mr-2 h-4 w-4" />
{date ? (
format(date, "PPP")
) : (
<span>Pick a date</span>
)}
</Button>
</PopoverTrigger>
<PopoverContent className="w-auto p-0">
<Calendar
mode="single"
selected={date}
onSelect={setDate}
initialFocus
/>
</PopoverContent>
</Popover>
<div className="ml-2">
<Button
onClick={() =>
toast.success(
`Getting data for ${startOfMonth(date!)}-${date}`
)
}
>
<span className="text-sm">Update Data</span>
</Button>
</div>
</div>
<Tabs defaultValue="mm">
<TabsList>
{tabs.map((tab) => {
if (tab.roles.includes(role[0].role))
return (
<TabsTrigger value={tab.key}>
{tab.label}
</TabsTrigger>
);
})}
</TabsList>
{tabs.map((tab) => {
if (tab.roles.includes(role[0].role))
return (
<TabsContent value={tab.key}>
{tab.content}
</TabsContent>
);
})}
</Tabs>
</div>
);
}
// Skeleton placeholder table until real material data is wired up; the first
// cell of each row links through to the article detail route.
function DummyContent() {
  const placeholderRows = [...Array(10).keys()];
  return (
    <LstCard className="w-5/6">
      <Table>
        <TableHeader>
          <TableRow>
            <TableHead>Av</TableHead>
            <TableHead>Description</TableHead>
            <TableHead>Material Type</TableHead>
            <TableHead>Waste</TableHead>
            <TableHead>Loss / Gain $$</TableHead>
          </TableRow>
        </TableHeader>
        <TableBody>
          {placeholderRows.map((n) => (
            <TableRow key={n}>
              <TableCell className="font-medium m-2">
                <Link to="/article/$av" params={{ av: `${n}` }}>
                  {n}
                </Link>
              </TableCell>
              <TableCell className="font-medium">
                <Skeleton className="h-4" />
              </TableCell>
              <TableCell>
                <Skeleton className="h-4" />
              </TableCell>
              <TableCell>
                <Skeleton className="h-4" />
              </TableCell>
              <TableCell>
                <Skeleton className="h-4" />
              </TableCell>
            </TableRow>
          ))}
        </TableBody>
      </Table>
    </LstCard>
  );
}

View File

@@ -6,7 +6,10 @@ import Barcode from "react-barcode";
import { BarcodePDFExport } from "./BarcodeExport";
import { BulkBarcodePDFExport } from "./BulkExport";
const commoncmd = [{ name: "Relocate", commandId: 33 }];
const commoncmd = [
{ name: "Relocate", commandId: 33 },
//-{ name: "Stock in", commandId: 22 },
];
export default function CommonCommands() {
const [checked, setChecked] = useState([]);

View File

@@ -26,7 +26,7 @@ export default function DmPage() {
<ul className="list-disc mr-2">
<li>
Download the standard template if you have not yet done
so, top right click standard, then template.
so, Above click Standard Order Template.
</li>
<li>
Add in the orders like you see in the example below.
@@ -37,8 +37,8 @@ export default function DmPage() {
customerReleaseNumber. Quatity and dates can change.
</li>
<li>
Once you have all the orders enters click the upload
button on the top right
Once you have all the orders entered, click Standard
Order Import
</li>
</ul>

View File

@@ -1,26 +1,30 @@
import {LstCard} from "@/components/extendedUI/LstCard";
import {Button} from "@/components/ui/button";
import {CardHeader} from "@/components/ui/card";
import {Input} from "@/components/ui/input";
import {Label} from "@/components/ui/label";
import {useSessionStore} from "@/lib/store/sessionStore";
import axios from "axios";
import {useState} from "react";
import { LstCard } from "@/components/extendedUI/LstCard";
import { Button } from "@/components/ui/button";
import { CardHeader } from "@/components/ui/card";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {useForm} from "react-hook-form";
import {toast} from "sonner";
import axios from "axios";
import { useState } from "react";
import { useForm } from "react-hook-form";
import { toast } from "sonner";
export default function ConsumeMaterial() {
const {register: register1, handleSubmit: handleSubmit1, reset} = useForm();
const {
register: register1,
handleSubmit: handleSubmit1,
reset,
} = useForm();
const [submitting, setSubmitting] = useState(false);
const {token} = useSessionStore();
const handleConsume = async (data: any) => {
setSubmitting(true);
try {
const result = await axios.post(`/api/logistics/consume`, data, {
headers: {Authorization: `Bearer ${token}`},
});
// const result = await axios.post(`/api/logistics/consume`, data, {
// headers: {Authorization: `Bearer ${token}`},
// });
const result = await axios.post(`/api/logistics/consume`, data);
if (result.data.success) {
toast.success(result.data.message);
setSubmitting(false);
@@ -33,12 +37,14 @@ export default function ConsumeMaterial() {
toast.error(result.data.message);
}
} catch (error: any) {
//console.log(error);
console.log(error);
setSubmitting(false);
if (error.status === 401) {
toast.error("Unauthorized to do this task.");
} else {
toast.error("Unexpected error if this continues please constact an admin.");
toast.error(
"Unexpected error if this continues please constact an admin."
);
}
}
};
@@ -53,7 +59,9 @@ export default function ConsumeMaterial() {
<LstCard>
<form onSubmit={handleSubmit1(handleConsume)}>
<div className="m-2">
<Label htmlFor="runningNr">Enter unit running number</Label>
<Label htmlFor="runningNr">
Enter unit running number
</Label>
<Input
className="mt-2"
//defaultValue="634"
@@ -62,7 +70,9 @@ export default function ConsumeMaterial() {
/>
</div>
<div className="m-2">
<Label htmlFor="lotNum">Enter lot number</Label>
<Label htmlFor="lotNum">
Enter lot number
</Label>
<Input
className="mt-2"
//defaultValue="634"
@@ -71,7 +81,12 @@ export default function ConsumeMaterial() {
/>
</div>
<Button className="m-2" color="primary" type="submit" disabled={submitting}>
<Button
className="m-2"
color="primary"
type="submit"
disabled={submitting}
>
Consume materal
</Button>
</form>
@@ -81,12 +96,19 @@ export default function ConsumeMaterial() {
<LstCard>
<div className="w-96 p-1">
<ol>
<li>1. Enter the running number of the material you would like to consume</li>
<li>2. Enter the lot number you will be consuming to</li>
<li>
1. Enter the running number of the
material you would like to consume
</li>
<li>
2. Enter the lot number you will be
consuming to
</li>
<li>3. Press consume material</li>
</ol>
<p className="text-pretty w-96">
*This process is only for barcoded material, if it is set to auto consume you will
*This process is only for barcoded material,
if it is set to auto consume you will
encounter and error.
</p>
</div>

View File

@@ -0,0 +1,478 @@
import { LstCard } from "@/components/extendedUI/LstCard";
import { Button } from "@/components/ui/button";
import { CardContent, CardHeader } from "@/components/ui/card";
import { Input } from "@/components/ui/input";
import { Switch } from "@/components/ui/switch";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { useAppForm } from "@/utils/formStuff";
import axios from "axios";
import { useState } from "react";
import { toast } from "sonner";
import { Info } from "lucide-react";
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from "@/components/ui/tooltip";
import { useSettingStore } from "@/lib/store/useSettings";
export default function TransferToNextLot() {
const [gaylordFilled, setGaylordFilled] = useState([0]);
const [actualAmount, setActualAmount] = useState(0);
const [tab, setTab] = useState("esitmate");
const [typeSwitch, setTypeSwitch] = useState(false);
const { settings } = useSettingStore();
const server = settings.filter((n: any) => n.name === "dbServer");
const form = useAppForm({
defaultValues: {
runningNumber: "",
lotNumber: "",
originalAmount: "",
amount: "",
},
onSubmit: async ({ value }) => {
//console.log(transferData);
//toast.success("603468: qty: 361, was transfered to lot:24897");
try {
const res = await axios.post("/api/ocp/materiallottransfer", {
runningNumber: Number(value.runningNumber),
lotNumber: Number(value.lotNumber),
originalAmount: Number(value.originalAmount),
level: Number(
gaylordFilled.length === 1
? 0.25
: gaylordFilled.length === 2
? 0.5
: gaylordFilled.length === 3
? 0.75
: gaylordFilled.length === 4 && 0.95
),
amount: actualAmount,
type: typeSwitch ? "eom" : "lot",
});
if (res.data.success) {
toast.success(`${res.data.message}`);
form.reset();
setGaylordFilled([0]);
setActualAmount(0);
}
//console.log(res.data);
if (!res.data.success) {
toast.error(res.data.message);
}
} catch (error) {
if (error) {
console.log(error);
//toast.error(error)
}
}
},
});
return (
<div>
<LstCard>
<CardHeader>
<p className="text-center text-lg">
Material Transfer to Next lot
</p>
</CardHeader>
<div>
<div className="flex flex-wrap m-2 gap-2">
<div className="flex gap-2">
<div>
<LstCard className="">
<Tabs
defaultValue={tab}
onValueChange={setTab}
>
<TabsList>
<TabsTrigger value="esitmate">
Estimate Amount
</TabsTrigger>
<TabsTrigger value="actual">
Actual Amount
</TabsTrigger>
</TabsList>
<TabsContent value="esitmate">
<div className="grid columns-1">
<button
className={`box-border h-16 w-96 border-3 ${
gaylordFilled.includes(
4
)
? " bg-green-500"
: ""
}`}
onClick={() =>
setGaylordFilled([
1, 2, 3, 4,
])
}
>
<p className="text-center">
Almost full - 95%
</p>
</button>
<button
className={`box-border h-16 w-96 border-3 ${
gaylordFilled.includes(
3
)
? " bg-green-500"
: ""
}`}
onClick={() =>
setGaylordFilled([
1, 2, 3,
])
}
>
<p className="text-center">
About full - 75%
</p>
</button>
<button
className={`box-border h-16 w-96 border-3 ${
gaylordFilled.includes(
2
)
? " bg-green-500"
: ""
}`}
onClick={() =>
setGaylordFilled([1, 2])
}
>
<p className="text-center">
Half full - 50%
</p>
</button>
<button
className={`box-border h-16 w-96 border-3 ${
gaylordFilled.includes(
1
)
? " bg-green-500"
: ""
}`}
onClick={() =>
setGaylordFilled(() => [
1,
])
}
>
<p className="text-center">
Almost empty - 25%
</p>
</button>
</div>
<div className="flex justify-end pr-1">
<Button
onClick={() =>
setGaylordFilled([0])
}
>
Reset Gaylord
</Button>
</div>
</TabsContent>
<TabsContent
value="actual"
className="w-96"
>
<CardHeader>
<p>
Enter the total amount of
the cage/gaylord
</p>
</CardHeader>
<CardContent>
<Input
type="number"
//placeholder="35"
onChange={(e) =>
setActualAmount(
Number(
e.target.value
)
)
}
/>
</CardContent>
</TabsContent>
</Tabs>
</LstCard>
</div>
<div>
<div className="w-96">
<LstCard>
<form
onSubmit={(e) => {
e.preventDefault();
form.handleSubmit();
}}
>
<div className="mt-3 p-2">
<form.AppField
name="runningNumber"
children={(field) => (
<field.InputField
label="Running Number"
inputType="number"
required={true}
/>
)}
/>
</div>
<div className="mt-3 p-2">
<form.AppField
name="lotNumber"
children={(field) => (
<field.InputField
label="Lot Number"
inputType="number"
required={true}
/>
)}
/>
{tab !== "actual" && (
<div className="mt-3 p-2">
<form.AppField
name="originalAmount"
children={(
field
) => (
<field.InputField
label="Orignal Quantity"
inputType="number"
required={
true
}
/>
)}
/>
</div>
)}
</div>
<div className="flex justify-between p-2">
<div className="flex items-center space-x-2">
<Switch
checked={typeSwitch}
onCheckedChange={
setTypeSwitch
}
/>
<span>
{typeSwitch ? (
<div className="flex items-center space-x-2">
<span>
"EOM
Transfer"
</span>
<Tooltip>
<TooltipTrigger>
<Info className="h-[16px] w-[16px]" />
</TooltipTrigger>
<TooltipContent>
<p>
Click
the
toggle
if
you
will
be
transfering
at
EOM,
NOTE:
This
will
trigger
the
delayed
transfer.
</p>
</TooltipContent>
</Tooltip>
</div>
) : (
<div className="flex items-center space-x-2">
<span>
"Lot
Transfer"
</span>
<Tooltip>
<TooltipTrigger>
<Info className="h-[16px] w-[16px]" />
</TooltipTrigger>
<TooltipContent>
<p>
Click
the
toggle
if
you
will
be
transfering
at
EOM,
NOTE:
This
will
trigger
the
delayed
transfer.
</p>
</TooltipContent>
</Tooltip>
</div>
)}
</span>
</div>
<form.AppForm>
<form.SubmitButton>
Transfer To Lot
</form.SubmitButton>
</form.AppForm>
</div>
</form>
</LstCard>
</div>
</div>
</div>
<div>
<LstCard className="p-2">
<CardHeader>
<p className="text-center text-lg">
Moving material to the next lot.
</p>
</CardHeader>
{tab !== "actual" ? (
<div>
<ol>
<li>
1. Grab the gaylord running
number from the gaylord at the
line/next to the tschritter
</li>
<li>
2. Grab the next lot number you
are going to be running (or the
one that state no Main material
prepared)
</li>
<li>
3. Enter the total gaylord
weight (this is how much the
gaylord weighed when it came in
from the supplier.)
</li>
<li>
4. *Click the level of the
gaylord (this is just an
estimate to move to the next
lot.)
</li>
<li>
5. type in running number on the
gaylord.
</li>
<li>
6. Type in the new lot number.
</li>
<li>7. Press "Transfer To Lot"</li>
</ol>
<br></br>
<p>
* to reduce the time needed to get
the lot going we will use an
estimate of how full the gaylord is.
</p>
<p>
NOTE: This is not the return
process, this process will just get
the gaylord to the next lot.
</p>
<br />
{settings.length > 0 && (
<p>
For more in depth instructions
please{" "}
<a
href={`https://${server[0].value}.alpla.net/lst/d/docs/ocp/ocp#tranfer-partial-estimated-quantity-to-the-next-lot`}
target="_blank"
>
<em>CLICK HERE</em>
</a>
</p>
)}
</div>
) : (
<div>
<ol>
<li>
1. Grab the gaylord running
number from the gaylord at the
line/next to the tschritter
</li>
<li>
2. Grab the next lot number you
are going to be running (or the
one that state no Main material
prepared)
</li>
<li>
3. Take the gaylord to the scale
and weight it
</li>
<li>
4. Enter the weight of the
gaylord minus the tar weight.
</li>
<li>
5. type in running number on the
gaylord.
</li>
<li>
6. Type in the new lot number.
</li>
<li>7. Press "Transfer To Lot"</li>
</ol>
<br></br>
<p>
NOTE: This is not the return
process, this process will just get
the gaylord to the next lot.
</p>
<br />
{settings.length > 0 && (
<p>
For more in depth instructions
please{" "}
<a
href={`https://${server[0].value}.alpla.net/lst/d/docs/ocp/ocp#tranfer-partial-estimated-quantity-to-the-next-lot`}
target="_blank"
>
<em>CLICK HERE</em>
</a>
</p>
)}
</div>
)}
</LstCard>
</div>
</div>
</div>
</LstCard>
</div>
);
}

View File

@@ -40,7 +40,6 @@ export function AttachSilo(props: any) {
machineId: "",
},
onSubmit: async ({ value }) => {
console.log(value);
try {
const res = await axios.post(
"/api/logistics/attachsilo",

View File

@@ -48,6 +48,7 @@ export function DetachSilo(props: any) {
);
if (res.status === 200) {
console.log(res.data.data);
toast.success(res.data.message);
refetch();

View File

@@ -21,13 +21,28 @@ import { toast } from "sonner";
import ChartData from "./ChartData";
import { AttachSilo } from "./AttachSilo";
import { DetachSilo } from "./DetachSilo";
import { useSessionStore } from "@/lib/store/sessionStore";
import { useModuleStore } from "@/lib/store/useModuleStore";
import { useGetUserRoles } from "@/lib/store/useGetRoles";
export default function SiloCard(data: any) {
const token = localStorage.getItem("auth_token");
const [submitting, setSubmitting] = useState(false);
const { refetch } = useQuery(getStockSilo());
const { user } = useSessionStore();
const { userRoles } = useGetUserRoles();
const { modules } = useModuleStore();
const silo = data.silo;
// roles that can do the silo adjustments
const roles = ["systemAdmin", "technician", "admin", "manager"];
const module = modules.filter((n) => n.name === "logistics");
const accessRoles = userRoles.filter(
(n) => n.module_id === module[0]?.module_id
) as any;
const form = useForm({
defaultValues: {
newLevel: "",
@@ -46,7 +61,7 @@ export default function SiloCard(data: any) {
dataToSubmit,
{ headers: { Authorization: `Bearer ${token}` } }
);
console.log(res.data);
//console.log(res.data);
if (res.data.success) {
toast.success(res.data.message);
@@ -70,6 +85,8 @@ export default function SiloCard(data: any) {
}
},
});
console.log(accessRoles);
return (
<LstCard>
<div className="flex flex-row">
@@ -109,82 +126,98 @@ export default function SiloCard(data: any) {
</ul>
</div>
) : (
<form
onSubmit={(e) => {
e.preventDefault();
e.stopPropagation();
}}
>
<form.Field
name="newLevel"
validators={{
// We can choose between form-wide and field-specific validators
onChange: ({ value }) =>
value.length > 1
? undefined
: "You must enter a value greate than 1",
}}
children={(field) => {
return (
<div className="m-2 min-w-48 max-w-96 p-2">
<div className="flex flex-row">
<Label htmlFor="newLevel">
New level
</Label>
<div>
<Disclaimer />
</div>
</div>
<div className="flex flex-row">
<Input
name={field.name}
value={
field.state.value
}
onBlur={
field.handleBlur
}
type="decimal"
onChange={(e) =>
field.handleChange(
e.target.value
)
}
/>
<Button
className="ml-1"
variant="outline"
type="submit"
onClick={
form.handleSubmit
}
disabled={submitting}
>
{submitting ? (
<span className="w-24">
Submitting...
</span>
) : (
<span className="w-24">
Submit
</span>
)}
</Button>
</div>
<>
{user &&
roles.includes(accessRoles[0]?.role) && (
<form
onSubmit={(e) => {
e.preventDefault();
e.stopPropagation();
}}
>
<form.Field
name="newLevel"
validators={{
// We can choose between form-wide and field-specific validators
onChange: ({ value }) =>
value.length > 1
? undefined
: "You must enter a value greate than 1",
}}
children={(field) => {
return (
<div className="m-2 min-w-48 max-w-96 p-2">
<div className="flex flex-row">
<Label htmlFor="newLevel">
New level
</Label>
<div>
<Disclaimer />
</div>
</div>
<div className="flex flex-row">
<Input
name={
field.name
}
value={
field
.state
.value
}
onBlur={
field.handleBlur
}
type="decimal"
onChange={(
e
) =>
field.handleChange(
e
.target
.value
)
}
/>
<Button
className="ml-1"
variant="outline"
type="submit"
onClick={
form.handleSubmit
}
disabled={
submitting
}
>
{submitting ? (
<span className="w-24">
Submitting...
</span>
) : (
<span className="w-24">
Submit
</span>
)}
</Button>
</div>
{field.state.meta.errors
.length ? (
<em>
{field.state.meta.errors.join(
","
)}
</em>
) : null}
</div>
);
}}
/>
</form>
{field.state.meta
.errors
.length ? (
<em>
{field.state.meta.errors.join(
","
)}
</em>
) : null}
</div>
);
}}
/>
</form>
)}
</>
)}
</div>
</LstCard>

View File

@@ -243,7 +243,8 @@ export default function ManualPrintForm() {
<Textarea
//label="Comments"
placeholder="add more info as needed."
{...register("additionalComments")}
{...(register("additionalComments"),
{ required: true, minLength: 10 })}
/>
</div>

View File

@@ -0,0 +1,29 @@
import * as React from "react"
import * as SwitchPrimitive from "@radix-ui/react-switch"
import { cn } from "@/lib/utils"
// Toggle switch built on the Radix UI Switch primitive, styled in the
// shadcn/ui convention. All props are forwarded to SwitchPrimitive.Root;
// `className` is merged into the base styles via cn().
function Switch({
  className,
  ...props
}: React.ComponentProps<typeof SwitchPrimitive.Root>) {
  return (
    <SwitchPrimitive.Root
      data-slot="switch"
      // Checked/unchecked colors come from Radix's data-[state=...] attributes.
      className={cn(
        "peer data-[state=checked]:bg-primary data-[state=unchecked]:bg-input focus-visible:border-ring focus-visible:ring-ring/50 dark:data-[state=unchecked]:bg-input/80 inline-flex h-[1.15rem] w-8 shrink-0 items-center rounded-full border border-transparent shadow-xs transition-all outline-none focus-visible:ring-[3px] disabled:cursor-not-allowed disabled:opacity-50",
        className
      )}
      {...props}
    >
      {/* Thumb slides via translate-x driven by the checked state. */}
      <SwitchPrimitive.Thumb
        data-slot="switch-thumb"
        className={cn(
          "bg-background dark:data-[state=unchecked]:bg-foreground dark:data-[state=checked]:bg-primary-foreground pointer-events-none block size-4 rounded-full ring-0 transition-transform data-[state=checked]:translate-x-[calc(100%-2px)] data-[state=unchecked]:translate-x-0"
        )}
      />
    </SwitchPrimitive.Root>
  )
}
export { Switch }

View File

@@ -1,5 +1,6 @@
import ConsumeMaterial from "@/components/logistics/materialHelper/consumption/ConsumeMaterial";
import PreformReturn from "@/components/logistics/materialHelper/consumption/MaterialReturn";
import TransferToNextLot from "@/components/logistics/materialHelper/consumption/TransferToNextLot";
import { createFileRoute } from "@tanstack/react-router";
export const Route = createFileRoute(
@@ -21,10 +22,21 @@ export const Route = createFileRoute(
function RouteComponent() {
const url: string = window.location.host.split(":")[0];
const auth = localStorage.getItem("auth_token");
return (
<div>
<ConsumeMaterial />
{url === "localhost" && <PreformReturn />}
<div className="flex flex-wrap">
{auth ? (
<>
<ConsumeMaterial />
{url === "localhost" && <PreformReturn />}
<TransferToNextLot />
</>
) : (
<>
<ConsumeMaterial />
<TransferToNextLot />
</>
)}
</div>
);
}

View File

@@ -28,6 +28,7 @@ import { useLogout } from "@/hooks/useLogout";
import ExportInventoryData from "@/components/logistics/warehouse/ExportInventoryData";
import { AddCards } from "@/components/dashboard/AddCards";
import DMButtons from "@/components/logistics/dm/DMButtons";
import { useSettingStore } from "@/lib/store/useSettings";
//import { AddCards } from "@/components/dashboard/AddCards";
// same as the layout
@@ -38,6 +39,9 @@ export const Route = createRootRoute({
const { user } = useSessionStore();
const logout = useLogout();
const location = useLocation();
const { settings } = useSettingStore();
const server = settings.filter((n: any) => n.name === "dbServer");
return (
<div className="overflow-hidden">
@@ -61,6 +65,17 @@ export const Route = createRootRoute({
<div className="m-1">
<ModeToggle />
</div>
<div className="mr-1 ml-1">
{settings.length > 0 && (
<a
href={`https://${server[0].value}.alpla.net/lst/d`}
target="_blank"
>
LST - Docs
</a>
)}
</div>
{session ? (
<div className="m-1">
<DropdownMenu>

View File

@@ -33,13 +33,12 @@ function RouteComponent() {
<span className="font-bold">
Authentication Notice:
</span>
To interact with the Alpla prod through this
application, you must use your{" "}
<span className="font-semibold">
Windows login credentials
<span>
The username, email, and password are
only for LST you <em>DO NOT</em>Need to
use Windows username if you do not wish
to.
</span>
. These credentials are used solely for
authentication purposes.
</li>
{/* <li>
<span className="font-bold">

View File

@@ -66,6 +66,7 @@ export function InvTable<TData, TValue>({
});
//console.log(table.getState().sorting);
//console.log(parseInt(style.height.replace("px", "")) - 50);
console.log(info);
return (
<LstCard
className="p-3"
@@ -79,8 +80,9 @@ export function InvTable<TData, TValue>({
<div>
<div className="flex flex-row justify-between">
<p className="text-center text-pretty">
{info.type} {data.length > 0 ? "lanes" : "lane"} older
than: {info.age}, needing to be completed
{info.rowType} {data.length > 0 ? "lanes" : "lane"}{" "}
older than: {info.age}, {data.length} needing to be
completed
</p>
</div>
<ScrollArea className="h-72 rounded-md border m-2">

164
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "lstv2",
"version": "2.24.1",
"version": "2.27.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "lstv2",
"version": "2.24.1",
"version": "2.27.0",
"dependencies": {
"@dotenvx/dotenvx": "^1.45.1",
"@hono/node-server": "^1.14.4",
@@ -29,6 +29,7 @@
"fast-xml-parser": "^5.2.5",
"fs-extra": "^11.3.0",
"jsonwebtoken": "^9.0.2",
"morgan": "^1.10.1",
"mssql": "^11.0.1",
"nodemailer": "^7.0.3",
"nodemailer-express-handlebars": "^7.0.0",
@@ -1548,14 +1549,6 @@
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@petamoriken/float16": {
"version": "3.9.2",
"resolved": "https://registry.npmjs.org/@petamoriken/float16/-/float16-3.9.2.tgz",
"integrity": "sha512-VgffxawQde93xKxT3qap3OH+meZf7VaSB5Sqd4Rqc+FP5alWbpOyan/7tRbOAvynjpG3GpdtAuGU/NdhQpmrog==",
"license": "MIT",
"optional": true,
"peer": true
},
"node_modules/@scalar/core": {
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/@scalar/core/-/core-0.3.3.tgz",
@@ -2118,6 +2111,24 @@
],
"license": "MIT"
},
"node_modules/basic-auth": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz",
"integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==",
"license": "MIT",
"dependencies": {
"safe-buffer": "5.1.2"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/basic-auth/node_modules/safe-buffer": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
"license": "MIT"
},
"node_modules/bcryptjs": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-3.0.2.tgz",
@@ -3379,6 +3390,15 @@
"node": ">=0.4.0"
}
},
"node_modules/depd": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
"integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==",
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/detect-file": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/detect-file/-/detect-file-1.0.0.tgz",
@@ -3716,6 +3736,12 @@
"node": ">=16"
}
},
"node_modules/ee-first": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
"integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==",
"license": "MIT"
},
"node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
@@ -4227,70 +4253,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/gel": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/gel/-/gel-2.0.1.tgz",
"integrity": "sha512-gfem3IGvqKqXwEq7XseBogyaRwGsQGuE7Cw/yQsjLGdgiyqX92G1xENPCE0ltunPGcsJIa6XBOTx/PK169mOqw==",
"license": "Apache-2.0",
"optional": true,
"peer": true,
"dependencies": {
"@petamoriken/float16": "^3.8.7",
"debug": "^4.3.4",
"env-paths": "^3.0.0",
"semver": "^7.6.2",
"shell-quote": "^1.8.1",
"which": "^4.0.0"
},
"bin": {
"gel": "dist/cli.mjs"
},
"engines": {
"node": ">= 18.0.0"
}
},
"node_modules/gel/node_modules/env-paths": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz",
"integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==",
"license": "MIT",
"optional": true,
"peer": true,
"engines": {
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/gel/node_modules/isexe": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
"integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
"license": "ISC",
"optional": true,
"peer": true,
"engines": {
"node": ">=16"
}
},
"node_modules/gel/node_modules/which": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
"integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
"license": "ISC",
"optional": true,
"peer": true,
"dependencies": {
"isexe": "^3.1.1"
},
"bin": {
"node-which": "bin/which.js"
},
"engines": {
"node": "^16.13.0 || >=18.0.0"
}
},
"node_modules/get-caller-file": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
@@ -6080,6 +6042,37 @@
"node": ">=0.10.0"
}
},
"node_modules/morgan": {
"version": "1.10.1",
"resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.1.tgz",
"integrity": "sha512-223dMRJtI/l25dJKWpgij2cMtywuG/WiUKXdvwfbhGKBhy1puASqXwFzmWZ7+K73vUPoR7SS2Qz2cI/g9MKw0A==",
"license": "MIT",
"dependencies": {
"basic-auth": "~2.0.1",
"debug": "2.6.9",
"depd": "~2.0.0",
"on-finished": "~2.3.0",
"on-headers": "~1.1.0"
},
"engines": {
"node": ">= 0.8.0"
}
},
"node_modules/morgan/node_modules/debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"license": "MIT",
"dependencies": {
"ms": "2.0.0"
}
},
"node_modules/morgan/node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"license": "MIT"
},
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
@@ -6236,6 +6229,27 @@
"node": ">=14.0.0"
}
},
"node_modules/on-finished": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz",
"integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==",
"license": "MIT",
"dependencies": {
"ee-first": "1.1.1"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/on-headers": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz",
"integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==",
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -7396,7 +7410,7 @@
"version": "1.8.2",
"resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.2.tgz",
"integrity": "sha512-AzqKpGKjrj7EM6rKVQEPpB288oCfnrEIuyoT9cyF4nmGa7V8Zk6f7RRqYisX8X9m+Q7bd632aZW4ky7EhbQztA==",
"devOptional": true,
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"

View File

@@ -1,6 +1,6 @@
{
"name": "lstv2",
"version": "2.24.1",
"version": "2.27.0",
"type": "module",
"scripts": {
"dev": "concurrently -n \"server,frontend\" -c \"#007755,#2f6da3\" \"npm run dev:server\" \"cd frontend && npm run dev\"",
@@ -21,8 +21,8 @@
"db:dev": "npm run build && npm run db:generate && npm run db:migrate",
"deploy": "standard-version --conventional-commits && npm run build",
"zipServer": "dotenvx run -f .env -- tsx server/scripts/zipUpBuild.ts \"C:\\Users\\matthes01\\Documents\\lstv2\"",
"v1Build": "cd C:\\Users\\matthes01\\Documents\\logisticsSupportTool && npm run oldBuilder",
"scriptBuild": "powershell -ExecutionPolicy Bypass -File server/scripts/build.ps1 -dir \"C:\\Users\\matthes01\\Documents\\lstv2\"",
"newBuild": "npm run build:server && npm run build:frontend && npm run zipServer && npm run copyToNew",
"copyToNew": "powershell -ExecutionPolicy Bypass -File server/scripts/copyToLst.ps1 -dir \"C:\\Users\\matthes01\\Documents\\lstv2\"",
"removeOld": "rimraf dist && rimraf frontend/dist",
"prodBuild": "npm run v1Build && npm run build && npm run zipServer && npm run dev",
"commit": "cz",
@@ -36,7 +36,7 @@
}
},
"admConfig": {
"build": 530,
"build": 661,
"oldBuild": "backend-0.1.3.zip"
},
"devDependencies": {
@@ -78,6 +78,7 @@
"fast-xml-parser": "^5.2.5",
"fs-extra": "^11.3.0",
"jsonwebtoken": "^9.0.2",
"morgan": "^1.10.1",
"mssql": "^11.0.1",
"nodemailer": "^7.0.3",
"nodemailer-express-handlebars": "^7.0.0",

View File

@@ -1,16 +1,19 @@
import { eq } from "drizzle-orm";
import { db } from "../../database/dbclient.js";
import { settings } from "../../database/schema/settings.js";
import { query } from "../services/sqlServer/prodSqlServer.js";
import { plantInfo } from "../services/sqlServer/querys/dataMart/plantInfo.js";
import { createLog } from "../services/logger/logger.js";
import { getSettings } from "../services/server/controller/settings/getSettings.js";
export const createSSCC = async (runningNumber: number) => {
// get the token
const plantToken = await db
.select()
.from(settings)
.where(eq(settings.name, "plantToken"));
let serverSettings = (await getSettings()) as any;
const plantToken = serverSettings?.filter(
(n: any) => n.name === "plantToken"
);
// const plantToken = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "plantToken"));
let global: any = []; // get from plant address in basis enter the entire string here.
try {

View File

@@ -1,6 +1,7 @@
import { eq } from "drizzle-orm";
import { db } from "../../database/dbclient.js";
import { settings } from "../../database/schema/settings.js";
import { getSettings } from "../services/server/controller/settings/getSettings.js";
// create the test server stuff
const testServers = [
@@ -12,19 +13,22 @@ const testServers = [
export const prodEndpointCreation = async (endpoint: string) => {
let url = "";
//get the plant token
const plantToken = await db
.select()
.from(settings)
.where(eq(settings.name, "plantToken"));
let serverSettings = await getSettings();
const plantToken = serverSettings?.filter((n) => n.name === "plantToken");
// await db
// .select()
// .from(settings)
// .where(eq(settings.name, "plantToken"));
// check if we are a test server
const testServer = testServers.some(
(server) => server.token === plantToken[0]?.value
);
const server = await db
.select()
.from(settings)
.where(eq(settings.name, "dbServer"));
const server = serverSettings?.filter((n) => n.name === "dbServer");
// await db
// .select()
// .from(settings)
// .where(eq(settings.name, "dbServer"));
if (testServer) {
//filter out what testserver we are

View File

@@ -29,6 +29,11 @@ import eom from "./services/eom/eomService.js";
import dataMart from "./services/dataMart/dataMartService.js";
import qualityRequest from "./services/quality/qualityService.js";
import produser from "./services/prodUser/prodUser.js";
import {
getSettings,
serverSettings,
} from "./services/server/controller/settings/getSettings.js";
import type { Settings } from "./types/settings.js";
// create the main prodlogin here
const username = "lst_user";
@@ -36,15 +41,16 @@ const password = "Alpla$$Prod";
export const lstAuth = btoa(`${username}:${password}`);
// checking to make sure we have the settings intialized
const { data: settingsData, error: settingError } = await tryCatch(
db.select().from(settings)
);
// const { data: settingsData, error: settingError } = await tryCatch(
// db.select().from(settings)
// );
if (settingError) {
throw Error("Error getting settings from the db. critical error.");
}
// if (settingError) {
// throw Error("Error getting settings from the db. critical error.");
// }
const serverIntialized: any = await getSettings();
const serverIntialized: any = settingsData;
export const installed =
serverIntialized.length === 0 && process.env.NODE_ENV !== "development"
? false
@@ -211,7 +217,8 @@ serve(
/**
* Only for ocme until we get them switched over to the single port setup.
*/
const setting = await db.select().from(settings);
// const setting = await db.select().from(settings);
const setting = serverSettings;
const isActive = setting.filter((n) => n.name === "ocmeService");
if (ocmeport && isActive[0]?.value === "1") {
serve(

View File

@@ -0,0 +1,61 @@
# copy-includes.ps1
# Copies an allow-list of files/folders (one path per line in $IncludesFile)
# into a freshly-emptied destination folder, preserving each entry's relative
# path under $BaseDir.
# NOTE(review): defaults hard-code a specific user's home directory — confirm
# these are overridden in CI / other machines.
param(
[string]$IncludesFile = ".includes",
[string]$Destination = "C:\Users\matthes01\Documents\lst\lstV2",
[string]$BaseDir = "C:\Users\matthes01\Documents\lst"
)
# .\copy-includes.ps1 will run with defaults
# .\copy-includes.ps1 -IncludesFile ".\mylist.txt" -Destination "D:\build\lstV2" will override defaults
# Abort early when the includes list itself is missing.
if (-Not (Test-Path $IncludesFile)) {
Write-Error "Includes file not found: $IncludesFile"
exit 1
}
# Ensure destination exists
if (!(Test-Path -Path $Destination)) {
New-Item -ItemType Directory -Path $Destination | Out-Null
Write-Host "Folder created: $Destination"
}
# Empty the destination folder
# NOTE(review): destructive — everything under $Destination is deleted before
# the copy; double-check the default path before running interactively.
Get-ChildItem -Path $Destination -Recurse -Force | Remove-Item -Recurse -Force
# If BaseDir wasnt explicitly passed in, use IncludesFile directory
if (-not $PSBoundParameters.ContainsKey('BaseDir')) {
$BaseDir = Split-Path -Parent (Resolve-Path $IncludesFile)
}
# Read includes list (ignore blank lines & comments)
$items = Get-Content $IncludesFile |
ForEach-Object { $_.Trim() } |
Where-Object { $_ -and -not $_.StartsWith("#") }
foreach ($item in $items) {
if ([System.IO.Path]::IsPathRooted($item)) {
# Absolute path (rare case)
$sourcePath = $item
$relative = Split-Path $item -Leaf # just take folder/file name
} else {
# Relative to BaseDir
$sourcePath = Join-Path $BaseDir $item
$relative = $item # keep full relative path e.g. "frontend\dist"
}
# Missing entries are warned about and skipped, not fatal.
if (-Not (Test-Path $sourcePath)) {
Write-Warning "Skipping missing path: $sourcePath"
continue
}
# Destination path should preserve the relative structure
$targetPath = Join-Path $Destination $relative
# Ensure the parent folder exists
$targetDir = Split-Path $targetPath -Parent
if (-not (Test-Path $targetDir)) {
New-Item -ItemType Directory -Path $targetDir -Force | Out-Null
}
Write-Host "Copying $sourcePath -> $targetPath" -ForegroundColor Cyan
Copy-Item -Path $sourcePath -Destination $targetPath -Recurse -Force
}

View File

@@ -4,6 +4,7 @@ import { db } from "../../database/dbclient.js";
import { serverData } from "../../database/schema/serverData.js";
import { eq, sql } from "drizzle-orm";
import { createLog } from "../services/logger/logger.js";
import { serverSettings } from "../services/server/controller/settings/getSettings.js";
type UpdateServerResponse = {
success: boolean;
@@ -48,6 +49,7 @@ export const updateServer = async (
};
}
console.log(serverInfo);
const scriptPath = `${process.env.DEVFOLDER}\\server\\scripts\\update.ps1 `;
const args = [
"-NoProfile",

View File

@@ -37,7 +37,7 @@ export const registerUser = async (
.values({ username, email, password })
.returning({ user: users.username, email: users.email });
if (usercount.length <= 1) {
if (usercount.length === 0) {
createLog(
"info",
"auth",

View File

@@ -7,6 +7,7 @@ import { createPassword } from "../../utils/createPassword.js";
import { createLog } from "../../../logger/logger.js";
import { sendEmail } from "../../../notifications/controller/sendMail.js";
import { settings } from "../../../../../database/schema/settings.js";
import { getSettings } from "../../../server/controller/settings/getSettings.js";
export const updateUserADM = async (userData: User) => {
/**
@@ -40,8 +41,8 @@ export const updateUserADM = async (userData: User) => {
};
}
const { data: s, error: se } = await tryCatch(db.select().from(settings));
//const { data: s, error: se } = await tryCatch(db.select().from(settings));
const { data: s, error: se } = await tryCatch(getSettings());
if (se) {
return {
success: false,

View File

@@ -7,16 +7,16 @@ import { deliveryByDateRange } from "../../sqlServer/querys/dataMart/deleveryByD
import { addDays, format } from "date-fns";
export const getDeliveryByDateRange = async (data: any | null) => {
const { data: plantToken, error: plantError } = await tryCatch(
db.select().from(settings).where(eq(settings.name, "plantToken"))
);
if (plantError) {
return {
success: false,
message: "Error getting Settings",
data: plantError,
};
}
// const { data: plantToken, error: plantError } = await tryCatch(
// db.select().from(settings).where(eq(settings.name, "plantToken"))
// );
// if (plantError) {
// return {
// success: false,
// message: "Error getting Settings",
// data: plantError,
// };
// }
let deliverys: any = [];
let updatedQuery = deliveryByDateRange;

View File

@@ -4,18 +4,21 @@ import { settings } from "../../../../database/schema/settings.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { openOrders } from "../../sqlServer/querys/dataMart/openOrders.js";
import { serverSettings } from "../../server/controller/settings/getSettings.js";
export const getOpenOrders = async (data: any | null) => {
const { data: plantToken, error: plantError } = await tryCatch(
db.select().from(settings).where(eq(settings.name, "plantToken"))
);
if (plantError) {
return {
success: false,
message: "Error getting Settings",
data: plantError,
};
}
// const { data: plantToken, error: plantError } = await tryCatch(
// db.select().from(settings).where(eq(settings.name, "plantToken"))
// );
// if (plantError) {
// return {
// success: false,
// message: "Error getting Settings",
// data: plantError,
// };
// }
const plantToken = serverSettings.filter((n) => n.name === "plantToken");
let orders: any = [];
let updatedQuery = openOrders;

View File

@@ -0,0 +1,47 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { articleInfo } from "../../sqlServer/querys/psiReport/articleData.js";
// type ArticleData = {
// id: string
// }
/**
 * Fetches PSI article master data for a comma-separated list of article ids (av's).
 *
 * @param avs comma-separated article ids, substituted into the SQL template's
 *            "[articles]" placeholder.
 * @returns { success, message, data } — data is the raw row set on success,
 *          the caught error on failure.
 */
export const getGetPSIArticleData = async (avs: string) => {
  // Guard: nothing to query without at least one article id.
  if (!avs) {
    return {
      success: false,
      message: `Missing av's please send at least one over`,
      data: [],
    };
  }
  // SECURITY NOTE(review): `avs` originates from an HTTP query string and is
  // spliced directly into SQL text — consider parameterizing or whitelisting.
  const { data, error } = (await tryCatch(
    query(articleInfo.replace("[articles]", avs), "PSI article info")
  )) as any;
  if (error) {
    createLog(
      "error",
      "datamart",
      "datamart",
      `There was an error getting the article info: ${JSON.stringify(
        error
      )}`
    );
    return {
      success: false,
      // fixed: key was misspelled "messsage", so route callers reading
      // `.message` on the error path got undefined
      message: `There was an error getting the article info`,
      data: error,
    };
  }
  return {
    success: true,
    message: "PSI Article Data",
    data: data.data,
  };
};

View File

@@ -0,0 +1,63 @@
import { and, between, inArray, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
// type ArticleData = {
// id: string
// }
/**
 * Reads historical inventory rows for the given articles between two dates.
 *
 * @param avs comma-separated article ids; split and trimmed before querying.
 * @param startDate inclusive lower bound for histDate.
 * @param endDate inclusive upper bound for histDate.
 * @returns { success, message, data } envelope with the matching rows.
 */
export const psiGetInventory = async (
  avs: string,
  startDate: string,
  endDate: string
) => {
  // Guard: nothing to query without at least one article id.
  if (!avs) {
    return {
      success: false,
      message: `Missing av's please send at least one over`,
      data: [],
    };
  }
  // Robustness: `between` needs both bounds — fail fast with a clear message
  // instead of letting the driver error out on undefined/null dates.
  if (!startDate || !endDate) {
    return {
      success: false,
      message: `Missing startDate/endDate please send both over`,
      data: [],
    };
  }
  const ids = avs.split(",").map((id) => id.trim());
  const { data, error } = (await tryCatch(
    db
      .select()
      .from(invHistoricalData)
      .where(
        and(
          inArray(invHistoricalData.article, ids),
          between(invHistoricalData.histDate, startDate, endDate)
        )
      )
  )) as any;
  if (error) {
    createLog(
      "error",
      "datamart",
      "datamart",
      // fixed: copy-pasted "planning" wording — this is the inventory query
      `There was an error getting the inventory info: ${JSON.stringify(
        error
      )}`
    );
    return {
      success: false,
      // fixed: key was misspelled "messsage"
      message: `There was an error getting the inventory info`,
      data: error,
    };
  }
  // debug console.log(articles.length) removed
  return {
    success: true,
    message: "PSI Inventory Data",
    data: data,
  };
};

View File

@@ -0,0 +1,63 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { planningNumbersByAVDate } from "../../sqlServer/querys/psiReport/planningNumbersByAv.js";
// type ArticleData = {
// id: string
// }
/**
 * Fetches PSI planning numbers for the given articles over a date range.
 *
 * @param avs comma-separated article ids for the "[articles]" placeholder.
 * @param startDate substituted into "[startDate]" in the SQL template.
 * @param endDate substituted into "[endDate]" in the SQL template.
 * @returns { success, message, data } — PalDay is formatted to 2 decimals
 *          (as a string) when present.
 */
export const psiGetPlanningData = async (
  avs: string,
  startDate: string,
  endDate: string
) => {
  // Guard: nothing to query without at least one article id.
  if (!avs) {
    return {
      success: false,
      message: `Missing av's please send at least one over`,
      data: [],
    };
  }
  // SECURITY NOTE(review): avs/startDate/endDate come from an HTTP query
  // string and are spliced into SQL text — consider parameterized queries.
  const { data, error } = (await tryCatch(
    query(
      planningNumbersByAVDate
        .replace("[articles]", avs)
        .replace("[startDate]", startDate)
        .replace("[endDate]", endDate),
      "PSI planning info"
    )
  )) as any;
  if (error) {
    createLog(
      "error",
      "datamart",
      "datamart",
      `There was an error getting the planning info: ${JSON.stringify(
        error
      )}`
    );
    return {
      success: false,
      // fixed: key was misspelled "messsage", so route callers reading
      // `.message` on the error path got undefined
      message: `There was an error getting the planning info`,
      data: error,
    };
  }
  const articles = data.data;
  return {
    success: true,
    message: "PSI planning Data",
    // Round PalDay for display; note toFixed returns a string.
    data: articles.map((n: any) => {
      if (n.PalDay) {
        return { ...n, PalDay: n.PalDay.toFixed(2) };
      }
      return n;
    }),
  };
};

View File

@@ -0,0 +1,63 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { productionNumbers } from "../../sqlServer/querys/psiReport/prodcuctionNumbers.js";
// type ArticleData = {
// id: string
// }
/**
 * Fetches PSI production numbers for the given articles over a date range.
 *
 * @param avs comma-separated article ids for the "[articles]" placeholder.
 * @param startDate substituted into "[startDate]" in the SQL template.
 * @param endDate substituted into "[endDate]" in the SQL template.
 * @returns { success, message, data } — PalDay is formatted to 2 decimals
 *          (as a string) when present.
 */
export const psiGetProductionData = async (
  avs: string,
  startDate: string,
  endDate: string
) => {
  // Guard: nothing to query without at least one article id.
  if (!avs) {
    return {
      success: false,
      message: `Missing av's please send at least one over`,
      data: [],
    };
  }
  // SECURITY NOTE(review): avs/startDate/endDate come from an HTTP query
  // string and are spliced into SQL text — consider parameterized queries.
  const { data, error } = (await tryCatch(
    query(
      productionNumbers
        .replace("[articles]", avs)
        .replace("[startDate]", startDate)
        .replace("[endDate]", endDate),
      "PSI production info"
    )
  )) as any;
  if (error) {
    createLog(
      "error",
      "datamart",
      "datamart",
      // fixed: copy-pasted "planning" wording — this is the production query
      `There was an error getting the production info: ${JSON.stringify(
        error
      )}`
    );
    return {
      success: false,
      // fixed: key was misspelled "messsage"
      message: `There was an error getting the production info`,
      data: error,
    };
  }
  const articles = data.data;
  return {
    success: true,
    // fixed: said "PSI planning Data" (copy-paste from the planning controller)
    message: "PSI Production Data",
    // Round PalDay for display; note toFixed returns a string.
    data: articles.map((n: any) => {
      if (n.PalDay) {
        return { ...n, PalDay: n.PalDay.toFixed(2) };
      }
      return n;
    }),
  };
};

View File

@@ -9,6 +9,10 @@ import fakeEDI from "./route/fakeEDI.js";
import addressCorrections from "./route/getCityStateData.js";
import fifoIndex from "./route/getFifoIndex.js";
import financeAudit from "./route/getFinanceAudit.js";
import psiArticleData from "./route/getPsiArticleData.js";
import psiPlanningData from "./route/getPsiPlanningData.js";
import psiProductionData from "./route/getPsiProductionData.js";
import psiInventory from "./route/getPsiinventory.js";
const app = new OpenAPIHono();
@@ -23,6 +27,10 @@ const routes = [
addressCorrections,
fifoIndex,
financeAudit,
psiArticleData,
psiPlanningData,
psiProductionData,
psiInventory,
] as const;
const appRoutes = routes.forEach((route) => {

View File

@@ -0,0 +1,61 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { getDeliveryByDateRange } from "../controller/getDeliveryByDateRange.js";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { getGetPSIArticleData } from "../controller/psiGetArticleData.js";
const app = new OpenAPIHono({ strict: false });
// NOTE(review): this is declared as a JSON request body on a GET route; GET
// requests should not carry bodies and the handler actually reads query
// params. Kept for OpenAPI-doc compatibility — confirm before removing.
const Body = z.object({
  includeRunnningNumbers: z.string().openapi({ example: "x" }),
});
// GET /psiarticledata — returns PSI article master data for the av ids
// passed in the "avs" query param.
app.openapi(
  createRoute({
    tags: ["dataMart"],
    summary: "Returns the psiarticleData.",
    method: "get",
    path: "/psiarticledata",
    request: {
      body: {
        content: {
          "application/json": { schema: Body },
        },
      },
    },
    responses: responses(),
  }),
  async (c) => {
    // Parsed query string; each key maps to an array of values.
    const articles: any = c.req.queries();
    // Record the hit for API usage stats.
    apiHit(c, { endpoint: "/psiarticledata" });
    // fixed: `articles` (an object) is always truthy, so the old
    // `articles ? articles["avs"][0] : null` threw a TypeError when the
    // "avs" query param was missing — before tryCatch could catch anything.
    // Guard on the key itself, matching the sibling PSI routes.
    const { data, error } = await tryCatch(
      getGetPSIArticleData(articles["avs"] ? articles["avs"][0] : null)
    );
    if (error) {
      console.log(error);
      return c.json(
        {
          success: false,
          message: "There was an error getting the articles.",
          data: error,
        },
        400
      );
    }
    return c.json(
      {
        success: data.success,
        message: data.message,
        data: data.data,
      },
      data.success ? 200 : 400
    );
  }
);
export default app;

View File

@@ -0,0 +1,64 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { psiGetPlanningData } from "../controller/psiGetPlanningData.js";
const app = new OpenAPIHono({ strict: false });
// Request-body schema, kept exactly as documented by the route definition.
const Body = z.object({
  includeRunnningNumbers: z.string().openapi({ example: "x" }),
});
// GET /psiplanningdata — forwards the avs/startDate/endDate query params to
// the planning-data controller and wraps its envelope in the HTTP response.
const planningRoute = createRoute({
  tags: ["dataMart"],
  summary: "Returns the psiarticleData.",
  method: "get",
  path: "/psiplanningdata",
  request: {
    body: {
      content: {
        "application/json": { schema: Body },
      },
    },
  },
  responses: responses(),
});
app.openapi(planningRoute, async (c) => {
  const params: any = c.req.queries();
  // Record the hit for API usage stats.
  apiHit(c, { endpoint: "/psiplanningdata" });
  // First value of a multi-valued query param, or null when absent.
  const first = (key: string) => (params[key] ? params[key][0] : null);
  const { data, error } = await tryCatch(
    psiGetPlanningData(first("avs"), first("startDate"), first("endDate"))
  );
  if (error) {
    console.log(error);
    return c.json(
      {
        success: false,
        message: "There was an error getting the planning.",
        data: error,
      },
      400
    );
  }
  return c.json(
    { success: data.success, message: data.message, data: data.data },
    data.success ? 200 : 400
  );
});
export default app;

View File

@@ -0,0 +1,64 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { psiGetProductionData } from "../controller/psiGetProductionData.js";
const app = new OpenAPIHono({ strict: false });
// NOTE(review): JSON body declared on a GET route; the handler actually reads
// query params — confirm whether the body schema can be dropped.
const Body = z.object({
includeRunnningNumbers: z.string().openapi({ example: "x" }),
});
// GET /psiproductiondata — forwards avs/startDate/endDate query params to the
// production-data controller and returns its { success, message, data } envelope.
app.openapi(
createRoute({
tags: ["dataMart"],
summary: "Returns the psiproductiondata.",
method: "get",
path: "/psiproductiondata",
request: {
body: {
content: {
"application/json": { schema: Body },
},
},
},
responses: responses(),
}),
async (c) => {
// Parsed query string; each key maps to an array of values.
const q: any = c.req.queries();
// Record the hit for API usage stats.
apiHit(c, { endpoint: "/psiproductiondata" });
//console.log(articles["avs"][0]);
// Pass the first value of each param, or null when the param is absent.
const { data, error } = await tryCatch(
psiGetProductionData(
q["avs"] ? q["avs"][0] : null,
q["startDate"] ? q["startDate"][0] : null,
q["endDate"] ? q["endDate"][0] : null
)
);
if (error) {
console.log(error);
return c.json(
{
success: false,
message: "There was an error getting the production.",
data: error,
},
400
);
}
//console.log(data);
// Mirror the controller's success flag into the HTTP status.
return c.json(
{
success: data.success,
message: data.message,
data: data.data,
},
data.success ? 200 : 400
);
}
);
export default app;

View File

@@ -0,0 +1,64 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { psiGetInventory } from "../controller/psiGetInventory.js";
const app = new OpenAPIHono({ strict: false });
// NOTE(review): JSON body declared on a GET route; the handler actually reads
// query params — confirm whether the body schema can be dropped.
const Body = z.object({
  includeRunnningNumbers: z.string().openapi({ example: "x" }),
});
// GET /getpsiinventory — forwards avs/startDate/endDate query params to the
// historical-inventory controller and returns its envelope.
app.openapi(
  createRoute({
    tags: ["dataMart"],
    summary: "Returns the getPsiinventory.",
    method: "get",
    path: "/getpsiinventory",
    request: {
      body: {
        content: {
          "application/json": { schema: Body },
        },
      },
    },
    responses: responses(),
  }),
  async (c) => {
    // Parsed query string; each key maps to an array of values.
    const q: any = c.req.queries();
    // Record the hit for API usage stats.
    apiHit(c, { endpoint: "/getpsiinventory" });
    // Pass the first value of each param, or null when the param is absent.
    const { data, error } = await tryCatch(
      psiGetInventory(
        q["avs"] ? q["avs"][0] : null,
        q["startDate"] ? q["startDate"][0] : null,
        q["endDate"] ? q["endDate"][0] : null
      )
    );
    if (error) {
      console.log(error);
      return c.json(
        {
          success: false,
          // fixed: said "the production" (copy-paste from the production route)
          message: "There was an error getting the inventory.",
          data: error,
        },
        400
      );
    }
    // Mirror the controller's success flag into the HTTP status.
    return c.json(
      {
        success: data.success,
        message: data.message,
        data: data.data,
      },
      data.success ? 200 : 400
    );
  }
);
export default app;

View File

@@ -0,0 +1,32 @@
import { eq } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { format } from "date-fns";
// Returns every historical inventory row whose histDate equals the given date
// (formatted as yyyy-MM-dd), wrapped in the standard { success, message, data }
// envelope.
export const historicalInvByDate = async (date: string) => {
// NOTE(review): new Date("yyyy-MM-dd") parses as UTC midnight while date-fns
// `format` renders in server-local time — on servers behind UTC this shifts
// the date back one day. Confirm the server TZ or the caller's date format.
const histDate = new Date(date);
const { data, error } = (await tryCatch(
db
.select()
.from(invHistoricalData)
.where(
eq(invHistoricalData.histDate, format(histDate, "yyyy-MM-dd"))
)
)) as any;
if (error) {
return {
success: false,
message: "There was an error with getting the inventory",
data: error,
};
}
return {
success: true,
message: `Historical inventory for ${date}`,
data: data,
};
};

View File

@@ -0,0 +1,24 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { format } from "date-fns";
import { query } from "../../sqlServer/prodSqlServer.js";
import { lastPurchasePrice } from "../../sqlServer/querys/eom/lstPurchasePrice.js";
/**
 * Runs the "last purchase price" report query and wraps the result rows in
 * the standard { success, message, data } envelope.
 */
export const lastPurchase = async () => {
  const result = (await tryCatch(
    query(lastPurchasePrice, "Last purchase price")
  )) as any;
  if (result.error) {
    return {
      success: false,
      message: "Error getting the last purchase price",
      data: result.error,
    };
  }
  return {
    success: true,
    message: `Last purchase price for all av in the last 5 years`,
    data: result.data.data,
  };
};

View File

@@ -0,0 +1,23 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { lastSalesPriceCheck } from "../../sqlServer/querys/eom/lastSalesprice.js";
/**
 * Runs the "last sales price" report query for the given date (substituted
 * into the SQL template's "[date]" placeholder) and wraps the result rows in
 * the standard { success, message, data } envelope.
 */
export const lastSales = async (date: string) => {
  const result = (await tryCatch(
    query(lastSalesPriceCheck.replace("[date]", date), "Last sales price")
  )) as any;
  if (result.error) {
    return {
      success: false,
      message: "Error getting the last sales price",
      data: result.error,
    };
  }
  return {
    success: true,
    message: `Last sales price for all av in the last 5 years`,
    data: result.data.data,
  };
};

View File

@@ -1,15 +0,0 @@
// import {prisma} from "database";
// import {createLog} from "logging";
// export const deleteHistory = async (date: string) => {
// // delete the inventory if it equals this date
// try {
// const remove = await prisma.$executeRaw`
// DELETE FROM historyInventory
// WHERE histDate < ${date}
// `;
// createLog("general/eom", "info", `${remove} were just remove from the historical inventory for date: ${date}`);
// } catch (error) {
// createLog("general/eom", "error", `Removing historical inventory error: ${error}`);
// }
// };

View File

@@ -4,10 +4,48 @@ const app = new OpenAPIHono();
import stats from "./route/stats.js";
import history from "./route/invHistory.js";
const routes = [stats, history] as const;
import { createJob } from "../notifications/utils/processNotifications.js";
import { historicalInvIMmport } from "./utils/historicalInv.js";
import { tryCatch } from "../../globalUtils/tryCatch.js";
import { query } from "../sqlServer/prodSqlServer.js";
import { shiftChange } from "../sqlServer/querys/misc/shiftChange.js";
import { createLog } from "../logger/logger.js";
import lastPurch from "./route/getLastPurchPrice.js";
import lastSales from "./route/getLastSalesPrice.js";
const routes = [stats, history, lastPurch, lastSales] as const;
const appRoutes = routes.forEach((route) => {
app.route("/eom", route);
});
setTimeout(async () => {
const { data: shift, error: shiftError } = (await tryCatch(
query(shiftChange, "shift change from material.")
)) as any;
if (shiftError) {
createLog(
"error",
"eom",
"eom",
"There was an error getting the shift times will use fallback times"
);
}
// shift split
const shiftTimeSplit = shift?.data[0]?.shiftChange.split(":");
const cronSetup = `${
shiftTimeSplit?.length > 0 ? `${parseInt(shiftTimeSplit[1])}` : "0"
} ${
shiftTimeSplit?.length > 0 ? `${parseInt(shiftTimeSplit[0])}` : "7"
} * * *`;
//console.log(cronSetup);
createJob("eom_historical_inv", cronSetup, historicalInvIMmport);
}, 5 * 1000);
// the time we want to run the hostircal data should be the same time the historical data run on the server
// getting this from the shift time
export default app;

View File

@@ -0,0 +1,41 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { lastPurchase } from "../controller/getLastPurchasesPrice.js";
const app = new OpenAPIHono({ strict: false });
// GET /lastpurchprice — returns the last purchase price report rows.
app.openapi(
  createRoute({
    tags: ["eom"],
    // fixed: summary said "Returns last sales price." (copy-paste from the
    // sales-price route) on the purchase-price endpoint
    summary: "Returns last purchase price.",
    method: "get",
    path: "/lastpurchprice",
    responses: responses(),
  }),
  async (c) => {
    // Record the hit for API usage stats.
    apiHit(c, { endpoint: "/lastpurchprice" });
    try {
      const res = await lastPurchase();
      return c.json(
        { success: res.success, message: res.message, data: res.data },
        200
      );
    } catch (error) {
      return c.json(
        {
          success: false,
          // fixed: said "posting the eom stat" (copy-paste from the stats route)
          message: "There was an error getting the last purchase price.",
          data: error,
        },
        400
      );
    }
  }
);
export default app;

View File

@@ -0,0 +1,43 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { lastPurchase } from "../controller/getLastPurchasesPrice.js";
import { lastSales } from "../controller/getLastestSalesPrice.js";
const app = new OpenAPIHono({ strict: false });
// GET /lastsalesprice — returns the last sales price report rows for the
// month passed in the "month" query param (empty string when absent).
app.openapi(
createRoute({
tags: ["eom"],
summary: "Returns last sales price.",
method: "get",
path: "/lastsalesprice",
responses: responses(),
}),
async (c) => {
//const body = await c.req.json();
const month: string = c.req.query("month") ?? "";
// Record the hit for API usage stats.
apiHit(c, { endpoint: "/lastsalesprice" });
try {
const res = await lastSales(month);
return c.json(
{ success: res.success, message: res.message, data: res.data },
200
);
} catch (error) {
// NOTE(review): error message mentions "posting the eom stat" — looks like
// a copy-paste from the stats route; confirm and reword.
return c.json(
{
success: false,
message: "There was an error posting the eom stat.",
data: error,
},
400
);
}
}
);
export default app;

View File

@@ -1,6 +1,7 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { historicalInvByDate } from "../controller/getHistoricalInvByDate.js";
const app = new OpenAPIHono({ strict: false });
const EomStat = z.object({
@@ -12,20 +13,24 @@ const EomStat = z.object({
app.openapi(
createRoute({
tags: ["eom"],
summary: "Gets the correct eom history.",
method: "post",
summary: "Gets History Data by date.",
method: "get",
path: "/histinv",
request: {
params: EomStat,
},
responses: responses(),
}),
async (c) => {
//const body = await c.req.json();
// make sure we have a vaid user being accessed thats really logged in
const month: string = c.req.query("month") ?? "";
apiHit(c, { endpoint: "/histinv" });
try {
return c.json({ success: true, message: "", data: [] }, 200);
const res = await historicalInvByDate(month);
return c.json(
{ success: res.success, message: res.message, data: res.data },
200
);
} catch (error) {
return c.json(
{

View File

@@ -0,0 +1,114 @@
import { sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { totalInvNoRn } from "../../sqlServer/querys/dataMart/totalINV.js";
import { format } from "date-fns-tz";
import { serverSettings } from "../../server/controller/settings/getSettings.js";
import { deleteHistory } from "./removeHistorical.js";
import { activeArticle } from "../../sqlServer/querys/dataMart/article.js";
// Nightly job: snapshots yesterday's total inventory (from the SQL server)
// into the local invHistoricalData table, tagging each row with this plant's
// token and the article's material type, then prunes old history.
// NOTE(review): name typo "IMmport" is kept — callers reference it as-is.
export const historicalInvIMmport = async () => {
// Plant token comes from the cached server settings loaded at startup.
const plantToken = serverSettings.filter((n) => n.name === "plantToken");
const { data, error } = (await tryCatch(
db.select().from(invHistoricalData)
)) as any;
if (error) {
// NOTE(review): on error `data` is undefined but execution continues —
// `dateCheck.length` below will then throw. Consider returning here.
createLog(
"error",
"eom",
"eom",
`There was an error getting the historical data`
);
}
// check if we have data already for today this way we dont duplicate anything.
// "today" is shifted back one day: the snapshot is dated yesterday.
const today = new Date();
today.setDate(today.getDate() - 1);
const dateCheck = data?.filter(
(i: any) => i.histDate === format(today, "yyyy-MM-dd")
);
if (dateCheck.length === 0) {
// get the historical data from the sql
const { data: inv, error: invError } = (await tryCatch(
query(totalInvNoRn, "eom historical data")
)) as any;
if (invError) {
createLog(
"error",
"eom",
"eom",
`There was an error getting the sql data`
);
return;
}
// An empty result set is treated as an error so we never write a blank day.
if (inv.data.length === 0) {
createLog("error", "eom", "eom", inv.message);
return;
}
// Active articles are used only to look up each row's material type.
// NOTE(review): avError is destructured but never checked — if this query
// fails, `articles.data` below throws. Consider handling it.
const { data: articles, error: avError } = (await tryCatch(
query(activeArticle, "Get active articles")
)) as any;
const av = articles.data.length > 0 ? articles.data : ([] as any);
const importInv = inv.data ? inv.data : [];
// Map SQL-server inventory rows onto the local history schema.
const eomImportData = importInv.map((i: any) => {
return {
// Dated server-side as yesterday, in the database's clock/timezone.
histDate: sql`(NOW() - INTERVAL '1 day')::date`,
// NOTE(review): assumes the plantToken setting exists — plantToken[0]
// would be undefined on a fresh install; confirm startup guarantees it.
plantToken: plantToken[0].value,
article: i.av,
articleDescription: i.Alias,
// Material type from the active-article lookup; default label when the
// article is not in the active list.
materialType:
av.filter((a: any) => a.IdArtikelvarianten === i.av)
.length > 0
? av.filter(
(a: any) => a.IdArtikelvarianten === i.av
)[0]?.TypeOfMaterial
: "Item not defined",
total_QTY: i.Total_PalletQTY,
avaliable_QTY: i.Avaliable_PalletQTY,
coa_QTY: i.COA_QTY,
held_QTY: i.Held_QTY,
consignment: i.Consigment,
lot_Number: i.lot,
};
});
const { data: dataImport, error: errorImport } = await tryCatch(
db.insert(invHistoricalData).values(eomImportData)
);
if (errorImport) {
createLog(
"error",
"eom",
"eom",
`There was an error importing all the inventory data.`
);
return;
}
if (dataImport) {
createLog(
"info",
"eom",
"eom",
`All data was imported succefully.`
);
return;
}
} else {
createLog("info", "eom", "eom", `Yesterdays Data already in..`);
}
// do the check to delete old data
// NOTE(review): not awaited — presumably fire-and-forget; confirm
// deleteHistory handles its own errors.
deleteHistory();
};

View File

@@ -0,0 +1,51 @@
// import {prisma} from "database";
// import {createLog} from "logging";
import { lte, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
// export const deleteHistory = async (date: string) => {
// // delete the inventory if it equals this date
// try {
// const remove = await prisma.$executeRaw`
// DELETE FROM historyInventory
// WHERE histDate < ${date}
// `;
// createLog("general/eom", "info", `${remove} were just remove from the historical inventory for date: ${date}`);
// } catch (error) {
// createLog("general/eom", "error", `Removing historical inventory error: ${error}`);
// }
// };
export const deleteHistory = async () => {
    /**
     * Prunes historical inventory: deletes every row whose histDate is
     * RETENTION_DAYS or more days in the past.
     * Keep RETENTION_DAYS in sync with the interval literal in the query.
     */
    const RETENTION_DAYS = 365;
    const { data, error } = await tryCatch(
        db
            .delete(invHistoricalData)
            .where(
                lte(
                    invHistoricalData.histDate,
                    sql`(NOW() - INTERVAL '365 day')::date`
                )
            )
    );
    if (error) {
        createLog(
            "error",
            "eom",
            "eom",
            "There was an error deleting the historical data."
        );
        return;
    }
    // Fix: the previous message claimed 45 days, contradicting the
    // 365-day interval actually used above.
    createLog(
        "info",
        "eom",
        "eom",
        `Data older than ${RETENTION_DAYS} days has been deleted.`
    );
};

View File

@@ -1,11 +1,13 @@
import {OpenAPIHono} from "@hono/zod-openapi";
import {apiReference} from "@scalar/hono-api-reference";
import {settings} from "../../../../database/schema/settings.js";
import {db} from "../../../../database/dbclient.js";
import {eq} from "drizzle-orm";
import { OpenAPIHono } from "@hono/zod-openapi";
import { apiReference } from "@scalar/hono-api-reference";
import { settings } from "../../../../database/schema/settings.js";
import { db } from "../../../../database/dbclient.js";
import { eq } from "drizzle-orm";
import { getSettings } from "../../server/controller/settings/getSettings.js";
const app = new OpenAPIHono();
const plantToken = await db.select().from(settings).where(eq(settings.name, "plantToken"));
const serverSettings = await getSettings();
const plantToken = serverSettings.filter((n) => n.name === "plantToken") as any; //await db.select().from(settings).where(eq(settings.name, "plantToken"));
let pToken = plantToken[0]?.value;
const testServers = ["test1", "test2", "test3"];
@@ -17,7 +19,7 @@ app.get(
apiReference({
theme: "kepler",
layout: "classic",
defaultHttpClient: {targetKey: "node", clientKey: "axios"},
defaultHttpClient: { targetKey: "node", clientKey: "axios" },
pageTitle: "Lst API Reference",
hiddenClients: [
"libcurl",

View File

@@ -0,0 +1,50 @@
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
import axios from "axios";
import { pino } from "pino";
import build from "pino-abstract-transport";
import { tryCatch } from "../../globalUtils/tryCatch.js";
// Maps pino's numeric log levels to their human-readable names; used below
// when forwarding records to the Go log server so logs are searchable.
const pinoLogLevels: any = {
    10: "trace",
    20: "debug",
    30: "info",
    40: "warn",
    50: "error",
    60: "fatal",
};
/**
 * Builds a pino transport that forwards each log record to the Go log
 * server via HTTP POST (`${LST_BASE_URL}/api/v1/log`). Delivery failures
 * are swallowed with a console notice so logging can never crash the app.
 */
export default async function buildGoTransport() {
    try {
        return build(async function (source) {
            for await (let obj of source) {
                // convert the numeric level to its name to make it more easy to find later :P
                const levelName = pinoLogLevels[obj.level] || "unknown";
                const { error } = (await tryCatch(
                    axios.post(`${process.env.LST_BASE_URL}/api/v1/log`, {
                        // `service` is not guaranteed on every record; the
                        // previous `obj?.service.toLowerCase()` threw when it
                        // was missing because the optional chain stopped at obj.
                        service: obj?.service?.toLowerCase() ?? "unknown",
                        level: levelName,
                        message: obj.msg,
                    })
                )) as any;
                if (error) {
                    console.log(
                        "The go server must be offline so we cant post the new logs."
                    );
                }
            }
        });
    } catch (err) {
        console.error("Error inserting log into database:", err);
    }
}

View File

@@ -18,6 +18,14 @@ const transport = pino.transport({
{
target: "./dbTransport.js",
},
// Only log to Go if LST_USE_GO=true
...(process.env.LST_USE_GO === "true"
? [
{
target: "./goTransport.js", // New transport for Go
},
]
: []),
],
});

View File

@@ -1,10 +1,10 @@
import {OpenAPIHono} from "@hono/zod-openapi";
import { OpenAPIHono } from "@hono/zod-openapi";
// routes
import clearLog from "./routes/clearLog.js";
import {db} from "../../../database/dbclient.js";
import {settings} from "../../../database/schema/settings.js";
import {logCleanup} from "./controller/logCleanup.js";
import { db } from "../../../database/dbclient.js";
import { settings } from "../../../database/schema/settings.js";
import { logCleanup } from "./controller/logCleanup.js";
import createNewLog from "./routes/createLog.js";
import getLogs from "./routes/getLogs.js";
import stream from "./routes/streamLogs.js";
@@ -12,14 +12,17 @@ import stream from "./routes/streamLogs.js";
const app = new OpenAPIHono();
const routes = [clearLog, createNewLog, getLogs, stream] as const;
const setting = await db.select().from(settings);
//const setting = await db.select().from(settings);
const appRoutes = routes.forEach((route) => {
app.route("/logger", route);
});
app.all("/logger/*", (c) => {
return c.json({success: false, message: "You have encounters a log route that dose not exist."});
return c.json({
success: false,
message: "You have encounters a log route that dose not exist.",
});
});
// run the clean up job ones on server restart/crash/update and then once a date

View File

@@ -7,6 +7,8 @@ import { postForecast } from "../postForecast.js";
import { query } from "../../../../../sqlServer/prodSqlServer.js";
import { activeArticle } from "../../../../../sqlServer/querys/dataMart/article.js";
import { addDays } from "date-fns";
import { sendEmail } from "../../../../../notifications/controller/sendMail.js";
import { createLog } from "../../../../../logger/logger.js";
let customerID = 4;
export const lorealForecast = async (data: any, user: any) => {
@@ -111,6 +113,9 @@ export const lorealForecast = async (data: any, user: any) => {
);
if (activeAV.length === 0) {
if (typeof forcast.customerArticleNo === "number") {
missingSku.push(forcast);
}
continue;
}
@@ -181,6 +186,9 @@ export const lorealForecast = async (data: any, user: any) => {
);
if (activeAV.length === 0) {
if (typeof forcast.customerArticleNo === "number") {
missingSku.push(forcast);
}
continue;
}
@@ -190,6 +198,56 @@ export const lorealForecast = async (data: any, user: any) => {
//console.log(comForecast);
// email the for the missing ones
const missedGrouped = Object.values(
missingSku.reduce((acc: any, item: any) => {
const key = item.customerArticleNo;
if (!acc[key]) {
// first time we see this customer
acc[key] = item;
} else {
// compare dates and keep the earliest
if (
new Date(item.requirementDate) <
new Date(acc[key].requirementDate)
) {
acc[key] = item;
}
}
return acc;
}, {})
);
const emailSetup = {
email: "Blake.matthes@alpla.com; Stuart.Gladney@alpla.com; Harold.Mccalister@alpla.com; Jenn.Osbourn@alpla.com",
subject:
missedGrouped.length > 0
? `Alert! There are ${missedGrouped.length}, missing skus.`
: `Alert! There is a missing SKU.`,
template: "missingLorealSkus",
context: {
items: missedGrouped,
},
};
const { data: sentEmail, error: sendEmailError } = await tryCatch(
sendEmail(emailSetup)
);
if (sendEmailError) {
createLog(
"error",
"blocking",
"notify",
"Failed to send email, will try again on next interval"
);
return {
success: false,
message: "Failed to send email, will try again on next interval",
};
}
// if the customerarticle number is not matching just ignore it
const predefinedObject = {
receivingPlantId: plantToken[0].value,

View File

@@ -157,7 +157,7 @@ export const standardOrders = async (data: any, user: any) => {
deliveryDate: excelDateStuff(o.DeliveryDate),
customerLineItemNo: o.CustomerLineNumber, // this is how it is currently sent over from abbott
customerReleaseNo: o.CustomerRealeaseNumber, // same as above
remark: o.remark === "" ? null : o.remark,
remark: o.Remark === "" ? null : o.Remark,
},
],
};
@@ -169,7 +169,7 @@ export const standardOrders = async (data: any, user: any) => {
orders: [...predefinedObject.orders, ...nOrder],
};
//console.log(updatedPredefinedObject);
//console.log(updatedPredefinedObject.orders[0]);
// post the orders to the server
const posting: any = await postOrders(updatedPredefinedObject, user);

View File

@@ -13,10 +13,12 @@ type Data = {
runningNr: string;
lotNum: number;
};
export const consumeMaterial = async (data: Data, prod: any) => {
export const consumeMaterial = async (data: Data) => {
const { runningNr, lotNum } = data;
// replace the rn
console.log(data);
const rnReplace = labelData.replaceAll("[rn]", runningNr);
let barcode;
@@ -26,12 +28,7 @@ export const consumeMaterial = async (data: Data, prod: any) => {
barcode = r?.data;
} catch (error) {
console.log(error);
createLog(
"error",
prod.user.username,
"logistics",
`Error getting barcode: ${error}`
);
createLog("error", "", "logistics", `Error getting barcode: ${error}`);
}
if (barcode.length === 0) {

View File

@@ -9,6 +9,10 @@ import { sendEmail } from "../../../notifications/controller/sendMail.js";
import { settings } from "../../../../../database/schema/settings.js";
import { generateOneTimeKey } from "../../../../globalUtils/singleUseKey.js";
import { eq } from "drizzle-orm";
import {
getSettings,
serverSettings,
} from "../../../server/controller/settings/getSettings.js";
export const createSiloAdjustment = async (
data: any | null,
@@ -18,18 +22,21 @@ export const createSiloAdjustment = async (
* Creates a silo adjustment based off warehouse, location, and qty.
* qty will come from the hmi, prolink, or silo patrol
*/
const { data: set, error: setError } = await tryCatch(
db.select().from(settings)
);
// const { data: set, error: setError } = await tryCatch(
// db.select().from(settings)
// );
if (setError) {
return {
success: false,
message: `There was an error getting setting data to post to the server.`,
data: setError,
};
}
// const { data: set, error: setError } = await tryCatch(getSettings());
// if (setError) {
// return {
// success: false,
// message: `There was an error getting setting data to post to the server.`,
// data: setError,
// };
// }
const set = serverSettings.length === 0 ? [] : serverSettings;
// getting stock data first so we have it prior to the adjustment
const { data: s, error: stockError } = await tryCatch(
query(siloQuery, "Silo data Query")

View File

@@ -12,18 +12,25 @@ import { delay } from "../../../../globalUtils/delay.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { settings } from "../../../../../database/schema/settings.js";
import { eq } from "drizzle-orm";
import {
getSettings,
serverSettings,
} from "../../../server/controller/settings/getSettings.js";
export const migrateAdjustments = async () => {
/**
* Migrates the silo adjustments from v1 to v2
*/
const { data, error } = await tryCatch(db.select().from(settings));
//const { data, error } = await tryCatch(db.select().from(settings));
// const { data, error } = await tryCatch(getSettings());
if (error) {
createLog("error", "silo", "logistics", "Getting settings.");
return;
}
// if (error) {
// createLog("error", "silo", "logistics", "Getting settings.");
// return;
// }
const data = serverSettings.length === 0 ? [] : serverSettings;
const migrationCompleted = data?.filter(
(n) => n.name === "siloAdjMigrations"
@@ -44,7 +51,7 @@ export const migrateAdjustments = async () => {
}
const { data: s, error: siloError } = await tryCatch(
axios.get(
`http://${server[0].value}:${port[0].value}/api/v1/warehouse/getSilosAdjustment?startDate=1/1/2020&endDate=4/1/2026`
`http://${server[0]?.value}:${port[0]?.value}/api/v1/warehouse/getSilosAdjustment?startDate=1/1/2020&endDate=4/1/2026`
)
);

View File

@@ -19,7 +19,7 @@ app.openapi(
summary: "Consumes material based on its running number",
method: "post",
path: "/consume",
middleware: authMiddleware,
//middleware: authMiddleware,
description:
"Provided a running number and lot number you can consume material.",
responses: {
@@ -51,33 +51,29 @@ app.openapi(
);
}
apiHit(c, { endpoint: "/consume", lastBody: data });
const authHeader = c.req.header("Authorization");
const token = authHeader?.split("Bearer ")[1] || "";
//const authHeader = c.req.header("Authorization");
//const token = authHeader?.split("Bearer ")[1] || "";
//const payload = await verify(token, process.env.JWT_SECRET!);
try {
const payload = await verify(token, process.env.JWT_SECRET!);
try {
//return apiReturn(c, true, access?.message, access?.data, 200);
//return apiReturn(c, true, access?.message, access?.data, 200);
const consume = await consumeMaterial(data, payload);
return c.json(
{ success: consume?.success, message: consume?.message },
200
);
} catch (error) {
//console.log(error);
//return apiReturn(c, false, "Error in setting the user access", error, 400);
return c.json(
{
success: false,
message: "Missing data please try again",
error,
},
400
);
}
const consume = await consumeMaterial(data);
return c.json(
{ success: consume?.success, message: consume?.message },
200
);
} catch (error) {
return c.json({ success: false, message: "Unauthorized" }, 401);
//console.log(error);
//return apiReturn(c, false, "Error in setting the user access", error, 400);
return c.json(
{
success: false,
message: "Missing data please try again",
error,
},
400
);
}
}
);

View File

@@ -57,6 +57,7 @@ app.openapi(
});
}
console.log(silo);
return c.json({
success: silo.success,
message: silo.message,

View File

@@ -7,6 +7,10 @@ export const excelDateStuff = (serial: number, time: any = 0) => {
// get the local timezone
const localoffset = new Date().getTimezoneOffset() / 60; // then divide by 60 to get the true number;
if (serial % 1 === 0) {
time = 800;
}
const addHours = serial + localoffset / 24;
//console.log(getJsDateFromExcel(addHours));
if (typeof serial !== "number" || serial <= 0) {
@@ -23,6 +27,8 @@ export const excelDateStuff = (serial: number, time: any = 0) => {
date.setMinutes(minutes);
}
//console.log(date.toLocaleString("en-US"), getJsDateFromExcel(addHours));
//console.log(serial);
//console.log(date.toISOString());
return date.toISOString(); //.toLocaleString("en-US"); // or .toISOString() if preferred
};

View File

@@ -239,15 +239,16 @@ export const tiImport = async () => {
.replaceAll("[customerState]", orderData[0].city.split(",")[1])
.replaceAll("[customerZip]", orderData[0].zipCode)
.replaceAll("[customerPO]", orderData[0].Header)
.replaceAll(
"[glCoding]",
`52410-${
orderData[0].artileType.toLowerCase() === "preform" ||
orderData[0].artileType.toLowerCase() === "metalCage"
? 31
: plantI[0].greatPlainsPlantCode
}`
) // {"52410 - " + (artileType.toLowerCase() === "preform" || artileType.toLowerCase() === "metalCage" ? 31: plantInfo[0].greatPlainsPlantCode)}
// .replaceAll(
// "[glCoding]",
// `52410-${
// orderData[0].artileType.toLowerCase() === "preform" ||
// orderData[0].artileType.toLowerCase() === "metalCage"
// ? 31
// : plantI[0].greatPlainsPlantCode
// }`
// ) // {"52410 - " + (artileType.toLowerCase() === "preform" || artileType.toLowerCase() === "metalCage" ? 31: plantInfo[0].greatPlainsPlantCode)}
.replaceAll("[glCoding]", `52410`)
.replaceAll(
"[pfc]",
`${
@@ -257,6 +258,15 @@ export const tiImport = async () => {
: orderData[0].costCenter
}`
)
.replaceAll(
"[locCode]",
`${
orderData[0].artileType.toLowerCase() === "preform" ||
orderData[0].artileType.toLowerCase() === "metalCage"
? 31
: plantI[0].greatPlainsPlantCode
}`
)
.replaceAll("[priceSheet]", await scacCheck(orderData));
//send over to be processed
@@ -276,16 +286,28 @@ export const tiImport = async () => {
* Update the db so we dont try to pull the next one
*/
const currentDate = new Date(Date.now());
const uniqueOrders = Array.from(
new Set([
...notiSet[0].notifiySettings.releases,
{
releaseNumber: header[0].releaseNumber,
timeStamp: new Date(Date.now()),
timeStamp: currentDate,
},
])
);
// 45 days ago
const dateLimit = new Date(
currentDate.getTime() - 45 * 24 * 60 * 60 * 1000
);
// filter dates
let filteredOrders = uniqueOrders.filter((item) => {
const time = new Date(item.timeStamp).getTime();
return time >= dateLimit.getTime();
});
const { data, error } = await tryCatch(
db
.update(notifications)
@@ -293,7 +315,7 @@ export const tiImport = async () => {
lastRan: sql`NOW()`,
notifiySettings: {
...notiSet[0].notifiySettings,
releases: uniqueOrders,
releases: filteredOrders,
},
})
.where(eq(notifications.name, "tiIntergration"))

View File

@@ -110,8 +110,11 @@ export let xmlPayloadTI = `
<ReferenceNumber type="Shipment Number" isPrimary="true">[shipNumber]</ReferenceNumber>
<ReferenceNumber type="PO Number" isPrimary="false">[customerPO]</ReferenceNumber>
[multieReleaseNumber]
<ReferenceNumber type="Store Number" isPrimary="false">[glCoding]</ReferenceNumber>
<!-- Comments here -->
<!-- <ReferenceNumber type="Store Number" isPrimary="false">[glCoding]</ReferenceNumber> -->
<ReferenceNumber type="GL Account Code" isPrimary="false">[glCoding]</ReferenceNumber>
<ReferenceNumber type="Profit Center" isPrimary="false">[pfc]</ReferenceNumber>
<ReferenceNumber type="Location Code" isPrimary="false">[locCode]</ReferenceNumber>
</ReferenceNumbers>
<Services/>
<EquipmentList/>

View File

@@ -68,8 +68,8 @@ export const note: any = [
{
name: "tiIntergration",
description: "Checks for new releases to be put into ti",
checkInterval: 2,
timeType: "hour",
checkInterval: 60,
timeType: "min",
emails: "",
active: false,
notifiySettings: {

View File

@@ -105,7 +105,7 @@ export const startNotificationMonitor = async () => {
}, 5 * 1000);
};
const createJob = async (
export const createJob = async (
id: string,
schedule: string,
task: () => Promise<void>

View File

@@ -0,0 +1,41 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{{!-- <link rel="stylesheet" href="styles/styles.css" /> --}}
{{> styles}}
</head>
<body>
<p>All,</p>
        <p>The SKUs below do not currently have an AV and will be ignored in the forecast import.</p>
        <p>The date and quantity shown are from the first time each SKU is needed, per the VMI report.</p>
<table >
<thead>
<tr>
<th>Customer Article Number</th>
<th>First Date Needed</th>
<th>Quantity</th>
</tr>
</thead>
<tbody>
{{#each items}}
<tr>
<td>{{customerArticleNo}}</td>
<td>{{requirementDate}}</td>
<td>{{quantity}}</td>
</tr>
{{/each}}
</tbody>
</table>
<div>
<p>Thank you,</p>
<p>LST Team</p>
</div>
</body>
</html>

View File

@@ -96,7 +96,7 @@ export const bookInLabel = async (data: any) => {
"ocp",
`${
data.printer ? data.printer[0].name : "Manual book in"
}, "Error: ${error}`
}, "Error: ${error?.response.data}`
);
// console.log(error.response.data);
return {

View File

@@ -19,11 +19,18 @@ interface Printer {
// Add any other expected properties
}
interface Zechetti {
line: string;
printer: number;
printerName: string;
}
export const labelingProcess = async ({
line = null as string | null,
printer = null as Printer | null,
userPrinted = null,
rfidTag = null,
zechette = null as Zechetti | null,
} = {}) => {
/**
* Creates a label once all logic is passed
@@ -69,6 +76,34 @@ export const labelingProcess = async ({
}
}
// if we are running the zechettii
if (zechette) {
const macId = await getMac(zechette.line);
// filter out the lot for the line
filteredLot = lots.data.filter(
(l: any) => l.MachineID === macId[0]?.HumanReadableId
);
if (filteredLot.length === 0) {
createLog(
"error",
"labeling",
"ocp",
`There is not a lot assigned to ${line}.`
);
return {
success: false,
message: `There is not a lot assigned to ${line}.`,
};
}
// remap the printer so its the zechetti one
filteredLot = filteredLot.map((p: any) => ({
...p,
printerID: zechette.printer,
PrinterName: zechette.printerName,
}));
}
// if we came from a printer
if (printer) {
// filter the lot based on the printerID
@@ -170,20 +205,15 @@ export const labelingProcess = async ({
};
}
// check mm is good
// check the material... mm,color (auto and manual combined), pkg
const mmStaged = await isMainMatStaged(filteredLot[0]);
if (!mmStaged) {
createLog(
"error",
"labeling",
"ocp",
`Main material is not prepaired for lot ${filteredLot[0].lot}`
);
if (!mmStaged.success) {
createLog("error", "labeling", "ocp", mmStaged.message);
return {
success: false,
message: `Main material is not prepaired for lot ${filteredLot[0].lot}`,
message: mmStaged.message,
};
}
@@ -247,6 +277,28 @@ export const labelingProcess = async ({
let book: any = [];
if (bookin[0].value === "1") {
book = await bookInLabel(label.data);
if (!book.success) {
// createLog(
// "error",
// "labeling",
// "ocp",
// `Error Booking in label: ${book.errors[0].message}`
// );
createLog(
"error",
"labeling",
"ocp",
`There was an error booking in the label: ${JSON.stringify(
book.data
)}`
);
return {
success: false,
message: `Error Booking in label`,
data: book,
};
}
} else {
createLog("info", "labeling", "ocp", "Bookin is turned off.");

View File

@@ -0,0 +1,426 @@
import { eq } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { printerData } from "../../../../../database/schema/printers.js";
import { runProdApi } from "../../../../globalUtils/runProdApi.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { labelInfo } from "../../../sqlServer/querys/warehouse/labelInfo.js";
import { format, formatDuration, intervalToDuration } from "date-fns";
import { shiftChange } from "../../../sqlServer/querys/misc/shiftChange.js";
import { success } from "zod/v4";
// Payload for a manual-material lot transfer request.
type NewLotData = {
    runningNumber: number; // label running number identifying the gaylord
    lotNumber: number; // destination production lot
    originalAmount: number; // quantity originally booked on the label
    level: number; // remaining fill level as a decimal (e.g. .25/.5/.75/.95)
    amount: number; // precise remaining quantity; used instead of level when > 0
    type: "lot" | "eom"; // "lot" = near-immediate transfer, "eom" = end-of-month window
};
// One scheduled (not yet executed) transfer job.
interface PendingJob {
    timeoutId: NodeJS.Timeout; // handle for the scheduled setTimeout
    runningNumber: string | number;
    data: any; // original NewLotData request
    consumeLot: any; // payload for the consume API call
    newQty: any; // computed remaining quantity
    scheduledFor: Date; // when the timeout is due to fire
}
// In-memory registry of pending transfers keyed by running number; used to
// reject duplicate requests until the scheduled job runs and clears itself.
export const pendingJobs = new Map<string | number, PendingJob>();
/**
* Move manual material to a new lot.
*
* The data sent over should be
* Running number
* Lot number
* Orignal Quantity
* level of gaylord
* amount can be sent over as a precise amount
* type what way are we lots
*/
export const lotMaterialTransfer = async (data: NewLotData) => {
    // Reject duplicates: a running number may only have one pending transfer.
    if (pendingJobs.has(data.runningNumber)) {
        const job = pendingJobs.get(data.runningNumber) as PendingJob;
        const duration = intervalToDuration({
            start: new Date(),
            end: job.scheduledFor,
        });
        createLog(
            "error",
            "materials",
            "ocp",
            `${
                data.runningNumber
            } is pending to be transfered already, remaining time ${formatDuration(
                duration,
                { format: ["hours", "minutes", "seconds"] }
            )}`
        );
        return {
            success: false,
            message: `${
                data.runningNumber
            } is pending to be transfered already, remaining time ${formatDuration(
                duration,
                { format: ["hours", "minutes", "seconds"] }
            )}`,
            data: [],
        };
    }
    // get the shift time
    const { data: shift, error: shiftError } = (await tryCatch(
        query(shiftChange, "shift change from material.")
    )) as any;
    if (shiftError) {
        createLog(
            "error",
            "materials",
            "ocp",
            "There was an error getting the shift times will use fallback times"
        );
    }
    // shift split; fall back to an empty array so the fallback times below
    // are actually used (previously this threw on a failed shift query,
    // since `shift` was null and `.length` was read off undefined).
    const shiftTimeSplit = shift?.data?.[0]?.shiftChange?.split(":") ?? [];
    //console.log(shiftTimeSplit);
    // Current time
    const now = new Date();
    // End of the EOM window: day 1 of the month, shift change minus 1h plus 3min.
    const target = new Date(
        now.getFullYear(),
        now.getMonth(),
        1, //now.getDate(),
        shiftTimeSplit.length > 0 ? parseInt(shiftTimeSplit[0]) - 1 : 5, // this will parse the hour to remove teh zero
        shiftTimeSplit.length > 0 ? parseInt(shiftTimeSplit[1]) + 3 : 3,
        0,
        0
    );
    // Start of the EOM window (too-early cutoff).
    const early = new Date(
        now.getFullYear(),
        now.getMonth(),
        1, //now.getDate(),
        shiftTimeSplit.length > 0 ? parseInt(shiftTimeSplit[0]) - 1 : 5, // this will parse the hour to remove teh zero
        shiftTimeSplit.length > 0 ? parseInt(shiftTimeSplit[1]) : 0,
        0,
        0
    );
    // Next month's window start, used only in the "try again later" message.
    const nextMonth = new Date(
        now.getFullYear(),
        now.getMonth() + 1,
        1, //now.getDate(),
        shiftTimeSplit.length > 0 ? parseInt(shiftTimeSplit[0]) - 1 : 5, // this will parse the hour to remove teh zero
        shiftTimeSplit.length > 0 ? parseInt(shiftTimeSplit[1]) : 0,
        0,
        0
    );
    // console.log(early, target);
    // EOM transfers are only allowed inside the [early, target] window.
    if (data.type === "eom" && (early > now || target < now)) {
        createLog(
            "error",
            "materials",
            "ocp",
            `Eom transfers is not allowed right now please try again at ${format(
                nextMonth,
                "M/d/yyyy hh:mm"
            )} `
        );
        return {
            success: false,
            message: `Eom transfers is not allowed right now please try again at ${format(
                nextMonth,
                "M/d/yyyy hh:mm"
            )} `,
            data: [],
        };
    }
    // Seconds before a "lot" transfer executes (gives the operator time to cancel).
    let timeoutTrans: number = data.type === "lot" ? 30 : 10;
    // get the barcode, and layoutID from the running number
    const { data: label, error: labelError } = (await tryCatch(
        query(
            labelInfo.replace("[runningNr]", `${data.runningNumber}`),
            "Get label info"
        )
    )) as any;
    if (labelError) {
        createLog(
            "error",
            "materials",
            "ocp",
            "There was an error getting the label info"
        );
        return {
            success: false,
            message: "There was an error getting the label info",
            data: labelError,
        };
    }
    if (label.data.length === 0) {
        createLog(
            "error",
            "materials",
            "ocp",
            `${data.runningNumber}: dose not exist or no longer in stock.`
        );
        return {
            success: false,
            message: `${data.runningNumber}: dose not exist or no longer in stock.`,
            data: [],
        };
    }
    //console.log(label);
    // Only partially-consumed material can be transferred; on-stock labels
    // have never been issued to a lot.
    if (label.data[0]?.stockStatus === "onStock") {
        createLog(
            "error",
            "materials",
            "ocp",
            `${data.runningNumber}: currently in stock and not consumed to a lot.`
        );
        return {
            success: false,
            message: `${data.runningNumber}: currently in stock and not consumed to a lot.`,
            data: [],
        };
    }
    // get the pdf24 printer id
    const { data: printer, error: printerError } = (await tryCatch(
        db.select().from(printerData).where(eq(printerData.name, "PDF24"))
    )) as any;
    if (printerError) {
        createLog(
            "error",
            "materials",
            "ocp",
            "There was an error the printer info"
        );
        return {
            success: false,
            message: "There was an error the printer info",
            data: printerError,
        };
    }
    // calculate the remaining amount bascially it will be orignal number * level sent over
    // level should be sent in a decimal .25 .5 .75 .95 the 95 will allow basically the what looks to be a full gaylord but we always want to consume something
    // NOTE(review): toFixed returns a string while data.amount is a number —
    // the downstream API appears to accept both; confirm before normalizing.
    const newQty =
        data.amount > 0
            ? data.amount
            : (data.originalAmount * data.level).toFixed(0);
    //console.log(data.amount);
    // reprint the label and send it to pdf24
    const reprintData = {
        clientId: 999,
        runningNo: label?.data[0].runnungNumber,
        printerId: printer[0].humanReadableId,
        layoutId: label?.data[0].labelLayout,
        noOfCopies: 0,
        quantity: newQty,
    } as any;
    //console.log(reprintData);
    const { data: reprint, error: reprintError } = (await tryCatch(
        runProdApi({
            endpoint: "/public/v1.0/ProductionLabelling/ReprintLabel",
            data: [reprintData],
        })
    )) as any;
    // Guarded with optional chaining: when tryCatch returned an error,
    // `reprint` is null and the old `reprint.success` access threw.
    if (!reprint?.success) {
        createLog(
            "error",
            "materials",
            "ocp",
            `RN:${data.runningNumber}, Reprinting Error: ${
                reprint?.data?.data?.message ?? reprintError ?? "Unknown"
            }`
        );
        return {
            success: false,
            message: `RN:${data.runningNumber}, Reprinting Error: ${
                reprint?.data?.data?.message ?? reprintError ?? "Unknown"
            }`,
            data: reprint,
        };
    }
    // return the label back to fm1 lane id 10001
    const matReturnData = {
        barcode: label?.data[0].Barcode,
        laneId: 10001,
    };
    //console.log(matReturnData);
    const { data: matReturn, error: matReturError } = (await tryCatch(
        runProdApi({
            endpoint:
                "/public/v1.0/IssueMaterial/ReturnPartiallyConsumedManualMaterial",
            data: [matReturnData],
        })
    )) as any;
    // Same null-safety guard as the reprint call above.
    if (!matReturn?.success) {
        createLog(
            "error",
            "materials",
            "ocp",
            `RN:${data.runningNumber}, Return Error ${
                matReturn?.data?.data?.errors?.[0]?.message ??
                matReturError ??
                "Unknown"
            }`
        );
        return {
            success: false,
            message: `RN:${data.runningNumber}, Return Error ${
                matReturn?.data?.data?.errors?.[0]?.message ??
                matReturError ??
                "Unknown"
            }`,
            data: matReturn,
        };
    }
    // consume to the lot provided.
    const consumeLot = {
        productionLot: data.lotNumber,
        barcode: label?.data[0].Barcode,
    };
    // "lot" waits a fixed 30s; "eom" waits until the window target time.
    const delay =
        data.type === "lot"
            ? timeoutTrans * 1000
            : target.getTime() - now.getTime();
    const transfer = await transferMaterial(delay, data, consumeLot, newQty);
    if (!transfer.success) {
        return {
            success: transfer.success,
            message: transfer.message,
            data: transfer.data,
        };
    }
    const duration = intervalToDuration({ start: now, end: target });
    const pretty = formatDuration(duration, {
        format: ["hours", "minutes", "seconds"],
    });
    if (data.type === "eom") {
        return {
            success: true,
            message: `RN:${data.runningNumber}: qty: ${newQty}, will be transfered to lot: ${data.lotNumber}, in ${pretty} `,
            data: [],
        };
    } else {
        return {
            success: true,
            message: `RN:${data.runningNumber}: qty: ${newQty}, was transfered to lot: ${data.lotNumber}`,
            data: [],
        };
    }
};
/**
 * Schedules the actual lot-consume API call to run after `delay` ms and
 * records it in `pendingJobs` so duplicate requests are rejected until the
 * job fires. Returns immediately with a "scheduled" result; the outcome of
 * the consume itself is only reported via createLog when the timeout runs.
 */
const transferMaterial = async (
    delay: number,
    data: any,
    consumeLot: any,
    newQty: any
) => {
    //console.log(data);
    // Defensive re-check; the caller also checks before invoking us.
    if (pendingJobs.has(data.runningNumber)) {
        createLog(
            "error",
            "materials",
            "ocp",
            `${data.runningNumber} is pending to be transfered already`
        );
        return {
            success: false,
            message: `${data.runningNumber} is pending to be transfered already`,
            data: [],
        };
    }
    const scheduledFor = new Date(Date.now() + delay);
    // sets the time out based on the type of transfer sent over.
    const timeoutId = setTimeout(async () => {
        try {
            // NOTE(review): matConsumeError is not checked directly; a
            // tryCatch failure leaves matConsume null, which the
            // !matConsume?.success branch below reports as "Unknown".
            const { data: matConsume, error: matConsumeError } =
                (await tryCatch(
                    runProdApi({
                        endpoint:
                            "/public/v1.0/IssueMaterial/ConsumeNonPreparedManualMaterial",
                        data: [consumeLot],
                    })
                )) as any;
            if (!matConsume?.success) {
                createLog(
                    "error",
                    "materials",
                    "ocp",
                    `RN:${data.runningNumber}, Consume Error ${
                        matConsume?.data?.data?.errors?.[0]?.message ??
                        "Unknown"
                    }`
                );
                return; // still hits finally
            }
            createLog(
                "info",
                "materials",
                "ocp",
                `RN:${data.runningNumber}: qty: ${newQty}, was transferred to lot:${data.lotNumber}`
            );
        } catch (err) {
            createLog(
                "error",
                "materials",
                "ocp",
                `RN:${data.runningNumber}, ${err}`
            );
        } finally {
            // Always clear the pending entry, even if error
            pendingJobs.delete(data.runningNumber);
        }
    }, delay);
    // Registering after setTimeout is safe: the callback cannot run before
    // the current synchronous section finishes (single-threaded event loop).
    pendingJobs.set(data.runningNumber, {
        timeoutId,
        runningNumber: data.runningNumber,
        data,
        consumeLot,
        newQty,
        scheduledFor,
    });
    // Immediately say we scheduled it
    return {
        success: true,
        message: `Transfer for ${data.runningNumber} scheduled`,
        data: [],
    };
};
// setInterval(() => {
// console.log(pendingJobs);
// }, 5000);
// setTimeout(async () => {
// lotMaterialTransfer({
// runnungNumber: 603468,
// lotNumber: 24897,
// originalAmount: 380,
// level: 0.95,
// });
// }, 5000);

View File

@@ -1,12 +1,14 @@
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { serverSettings } from "../../../server/controller/settings/getSettings.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { machineCheck } from "../../../sqlServer/querys/ocp/machineId.js";
import { mmQuery } from "../../../sqlServer/querys/ocp/mainMaterial.js";
export const isMainMatStaged = async (lot: any) => {
const set = serverSettings.length === 0 ? [] : serverSettings;
// make staged false by default and log an error if there's an issue
let isStaged = false;
let isStaged = { message: "Material is staged", success: true };
const { data, error } = (await tryCatch(
query(
@@ -19,14 +21,17 @@ export const isMainMatStaged = async (lot: any) => {
(m: any) => m.HumanReadableId === lot.machineID
);
// we have a check on ksc side to ignore the tetra machine for now as its not updating in 2.0
if (machine.StagingMainMaterialMandatory === 0) {
if (!machine[0].StagingMainMaterialMandatory) {
createLog(
"info",
"mainMaterial",
"ocp",
`The machine dose not require mm to print and book in.`
);
return true;
return {
message: "Machine dose not require material to be staged",
success: true,
};
}
// strangely the lot is not always sent over in SLC, so adding this in for now to see which line is causing this issue
@@ -51,20 +56,173 @@ export const isMainMatStaged = async (lot: any) => {
const res: any = r.data;
createLog(
"info",
"mainMaterial",
"ocp",
`MainMaterial results: ${JSON.stringify(res)}`
);
if (res[0].Staged >= 1) {
isStaged = true;
}
// if (res[0].noShortage === "good") {
// if (res[0].Staged >= 1) {
// isStaged = true;
// }
type CheckConditionArgs = {
results: any[];
filterFn: (n: any) => boolean;
failCondition: (n: any) => boolean;
failMessage: string;
successMessage: string;
lot: { lot: string | number };
};
const checkCondition = ({
results,
filterFn,
failCondition,
failMessage,
successMessage,
lot,
}: CheckConditionArgs): { message: string; success: boolean } => {
const subset = results.filter(filterFn);
if (subset.some(failCondition)) {
const failing = subset.filter(failCondition);
return {
message: `lot: ${lot.lot}, is missing: ${failing
.map(
(o: any) =>
`${o.MaterialHumanReadableId} - ${o.MaterialDescription}`
)
.join(",\n ")} ${failMessage}`,
success: false,
};
} else {
return { message: successMessage, success: true };
}
};
createLog("info", "mainMaterial", "ocp", `Maint material query ran.`);
const mainMaterial = res.find((n: any) => n.IsMainMaterial);
if (mainMaterial?.noMMShortage === "noMM") {
return {
message: `Main material: ${mainMaterial.MaterialHumanReadableId} - ${mainMaterial.MaterialDescription}: is not staged for ${lot.lot}`,
success: false,
};
}
// we need to filter the color stuff and then look for includes instead of a standard name. this way we can capture a everything and not a single type
// for manual consume color if active to check colors
const checkColorSetting = set.filter((n) => n.name === "checkColor");
// 2. Auto color
if (checkColorSetting[0].value === "1") {
// auto check
// 2. Auto color
const autoColor = checkCondition({
results: res,
lot,
filterFn: (n) =>
n.isManual &&
!("noPKGAutoShortage" in n) &&
!("noPKGManualShortage" in n), // pool = non-main, auto
failCondition: (n) => n.autoConsumeCheck === "autoConsumeNOK", // column = autoConsumeCheck
failMessage: "for autoconsume",
successMessage: "auto color is good",
});
if (!autoColor.success) return autoColor;
console.log(autoColor);
// 3. Manual color
const manualColor = checkCondition({
results: res,
lot,
filterFn: (n) =>
!n.IsMainMaterial &&
n.isManual &&
!("noPKGAutoShortage" in n) &&
!("noPKGManualShortage" in n), // pool = non-main, manual
failCondition: (n) => n.noManualShortage === "noOK", // column = noManualShortage
failMessage: "for manual material",
successMessage: "manual color is good",
});
if (!manualColor.success) return manualColor;
console.log(manualColor);
} else {
createLog(
"info",
"mainMaterial",
"ocp",
"Color check is not active."
);
}
// // if we want to check the packaging
const checkPKGSetting = set.filter((n) => n.name === "checkPKG");
if (checkPKGSetting[0].value === "1") {
const pkgAuto = checkCondition({
results: res,
lot,
filterFn: (n) =>
!n.IsMainMaterial &&
!n.isManual &&
"noPKGAutoShortage" in n,
failCondition: (n) => n.noPKGAutoShortage === "noAutoPkg",
failMessage: "for pkg",
successMessage: "auto PKG is good",
});
if (!pkgAuto.success) return pkgAuto;
console.log(pkgAuto);
// 5. Packaging manual
const pkgManual = checkCondition({
results: res,
lot,
filterFn: (n) =>
!n.IsMainMaterial &&
n.isManual &&
"noPKGManualShortage" in n,
failCondition: (n) => n.noPKGManualShortage === "noManPkg",
failMessage: "for pkg",
successMessage: "manual PKG is good",
});
if (!pkgManual.success) return pkgManual;
console.log(pkgManual);
} else {
createLog(
"info",
"mainMaterial",
"ocp",
"PKG check is not active."
);
}
// manual pkg
if (checkPKGSetting[0].value === "1") {
const packagingCheck = res.filter(
(n: any) =>
!n.IsMainMaterial &&
n.isManual &&
"noPKGManualShortage" in n
);
if (
packagingCheck.some(
(n: any) => n.noPKGManualShortage === "noManPkg"
)
) {
return (isStaged = {
message: `lot: ${lot.lot}, is missing: ${packagingCheck
.map(
(o: any) =>
`${o.MaterialHumanReadableId} - ${o.MaterialDescription}`
)
.join(",\n ")} for pkg`,
success: false,
});
}
} else {
createLog(
"info",
"mainMaterial",
"ocp",
"PKG check is not active."
);
}
} catch (err) {
createLog(
"error",

View File

@@ -1,6 +1,7 @@
import { createLog } from "../../../../../logger/logger.js";
import { pickedup } from "../../../../../ocme/controller/pickedup.js";
import { triggerScanner } from "../../../../../ocme/controller/triggerCamera.js";
import { serverSettings } from "../../../../../server/controller/settings/getSettings.js";
let lastProcessedTimestamp = 0;
@@ -10,13 +11,16 @@ export const palletSendTag = async (tagData: any) => {
* We will only trigger the camera and removal of pending tags
*/
const ocmeActive = serverSettings.filter((n) => n.name === "ocmeService");
const tagTime = new Date(tagData.state.timestamp).getTime();
// Only process if this is a new timestamp within the last 5 seconds
if (
tagTime !== lastProcessedTimestamp &&
Date.now() - tagTime <= 5000 &&
tagData.value
tagData.value &&
ocmeActive[0].value === "1"
) {
lastProcessedTimestamp = tagTime;
//console.log(tagData.state.timestamp);
@@ -47,7 +51,8 @@ export const palletSendTag = async (tagData: any) => {
if (
tagTime !== lastProcessedTimestamp &&
Date.now() - tagTime <= 5000 &&
!tagData.value
!tagData.value &&
ocmeActive[0].value === "1"
) {
await pickedup({ runningNr: 1234, all: true, areaFrom: "wrapper_1" });
}

View File

@@ -33,13 +33,13 @@ export const delieryInhouse = async (data: any) => {
"error",
"labeling",
"ocp",
`${data.printer.name}, Error:${res.data.Message}`
`${data.printer?.name}, Error:${res.data.Message}`
);
//printerUpdate(data.printer, 7, "Error while deliverying inhouse.");
return {
success: true,
message: `${data.printer.name} had an error while trying to deliver.`,
message: `${data.printer?.name} had an error while trying to deliver.`,
data: res.data,
};
} // label was just delivered

View File

@@ -0,0 +1,177 @@
import { Controller, Tag } from "st-ethernet-ip";
import { labelingProcess } from "../../labeling/labelProcess.js";
import { createLog } from "../../../../logger/logger.js";
// --- Module state for the Zechetti PLC connection ---
let plcAddress = "192.168.193.97"; // zechetti 2
// Timestamp of the last label we processed; compared against new tag reads.
let lastProcessedTimestamp = 0;
// Live controller handle; replaced with a fresh instance on reconnect.
let PLC = new Controller() as any;
// N7[0] holds the line number to be labeled.
const labelerTag = new Tag("N7[0]"); // change the car to a or b depending on what zechetti.
//const t = new Tag("CONV_M01_SHTL_UNLD_IN_FROM_PREV_CONV_TRACK_CODE.PAL_ORIGIN_LINE_N") // this is for the new zechette to reach the pallet form
let pollingInterval: any = null;
// NOTE(review): heartbeat is currently disabled — startHeartbeat() is commented out below.
let heartbeatInterval: any = null;
// True while a reconnect loop is in flight; prevents overlapping retries.
let reconnecting = false;
// Last line value we printed for, used to suppress duplicate labels.
let lastTag = 0;
// Track last successful read
let lastHeartbeat: number = Date.now();
/**
 * Opens the EtherNet/IP connection to the Zechetti PLC and starts the tag
 * polling loop. Any failure — initial connect, socket close, or controller
 * error — hands control to handleReconnect(), which retries with backoff.
 */
export async function zechitti1Connect() {
    try {
        createLog(
            "info",
            "zechitti1",
            "ocp",
            `Connecting to PLC at ${plcAddress}...`
        );
        await PLC.connect(plcAddress, 0);
    } catch (err: any) {
        createLog(
            "error",
            "zechitti1",
            "ocp",
            `Initial connection failed: ${err.message}`
        );
        handleReconnect();
        return;
    }
    createLog("info", "zechitti1", "ocp", "Zechetti 2 connected.");
    // Recover automatically when the socket drops or the controller errors.
    PLC.on("close", () => {
        console.warn("PLC connection closed.");
        handleReconnect();
    });
    PLC.on("error", (err: any) => {
        createLog("error", "zechitti1", "ocp", `PLC error: ${err.message}`);
        handleReconnect();
    });
    // Begin the 1s tag-read loop.
    startPolling();
}
/**
 * Polls the labeler tag once per second and triggers a label print when a
 * new line value appears.
 *
 * Gating rules: the tag value must be non-zero, different from the last
 * printed line, carry a PLC timestamp we have not processed yet, and at
 * least 30s must have passed since the last print. When the tag drops back
 * to 0 for 45s, lastTag is reset so the same line can print again.
 */
function startPolling() {
    // Restart-safe: clear any previous poll loop before starting a new one.
    if (pollingInterval) clearInterval(pollingInterval);
    pollingInterval = setInterval(async () => {
        try {
            await PLC.readTag(labelerTag);
            //lastHeartbeat = Date.now();
            // Compare timestamps as epoch millis. The previous code kept the
            // Date object and compared it with !== against a number, which is
            // always true and made the duplicate-timestamp guard a no-op.
            const tagTime = new Date(labelerTag.timestamp).getTime();
            // so we make sure we are not missing a pallet: once the tag is
            // clear for 45s, forget the last line so the next label prints.
            if (
                labelerTag.value == 0 &&
                Date.now() - lastProcessedTimestamp >= 45000
            ) {
                lastTag = labelerTag.value;
            }
            // if the tag is not zero, its timestamp is new, it's been longer
            // than 30 seconds, and the last tag differs, we can print
            if (
                labelerTag.value !== 0 &&
                lastTag !== labelerTag.value &&
                tagTime !== lastProcessedTimestamp &&
                Date.now() - lastProcessedTimestamp >= 30000
            ) {
                lastProcessedTimestamp = tagTime;
                lastTag = labelerTag.value;
                console.log(
                    `Time since last check: ${
                        Date.now() - tagTime
                    }, greater than 30000, ${
                        Date.now() - lastProcessedTimestamp >= 30000
                    }, the line to be printed is ${labelerTag.value}`
                );
                //console.log(labelerTag);
                const zechette = {
                    line: labelerTag.value.toString(),
                    printer: 22, // this is the id of the zechetti 2 to print we should move this to the db
                    printerName: "Zechetti1",
                };
                labelingProcess({ zechette: zechette });
            }
        } catch (err: any) {
            createLog(
                "error",
                "zechitti1",
                "ocp",
                `Polling error: ${err.message}`
            );
            handleReconnect();
        }
    }, 1000);
}
// function startHeartbeat() {
// if (heartbeatInterval) clearInterval(heartbeatInterval);
// heartbeatInterval = setInterval(() => {
// const diff = Date.now() - lastHeartbeat;
// if (diff > 60000) {
// // 1 minute
// console.warn(`⚠️ Heartbeat timeout: no data for ${diff / 1000}s`);
// handleReconnect();
// }
// }, 10000); // check every 10s
// }
/**
 * Re-establishes the PLC connection with exponential backoff (2s doubling up
 * to 30s, max 10 attempts). Stops the polling loop while reconnecting and
 * restarts it on success. The `reconnecting` flag guarantees only one retry
 * loop runs at a time.
 */
async function handleReconnect() {
    if (reconnecting) return;
    reconnecting = true;
    // Stop polling against a dead connection.
    if (pollingInterval) {
        clearInterval(pollingInterval);
        pollingInterval = null;
    }
    let delay = 2000; // start at 2s
    let attempts = 0;
    const maxAttempts = 10; // or limit by time, e.g. 2 min total
    while (!PLC.connected && attempts < maxAttempts) {
        attempts++;
        createLog(
            "info",
            "zechitti1",
            "ocp",
            `Reconnect attempt ${attempts}/${maxAttempts} in ${
                delay / 1000
            }s...`
        );
        await new Promise((res) => setTimeout(res, delay));
        try {
            PLC = new Controller(); // fresh instance
            await PLC.connect(plcAddress, 0);
            createLog("info", "zechitti1", "ocp", "Reconnected to PLC!");
            // Re-attach disconnect handlers: the fresh Controller does not
            // inherit the listeners registered at first connect, so without
            // this a later drop would go unnoticed until a poll failed.
            PLC.on("close", () => {
                console.warn("PLC connection closed.");
                handleReconnect();
            });
            PLC.on("error", (err: any) => {
                createLog(
                    "error",
                    "zechitti1",
                    "ocp",
                    `PLC error: ${err.message}`
                );
                handleReconnect();
            });
            reconnecting = false;
            startPolling();
            return;
        } catch (err: any) {
            createLog(
                "error",
                "zechitti1",
                "ocp",
                `Reconnect attempt failed: ${err.message}`
            );
            delay = Math.min(delay * 2, 30000); // exponential backoff up to 30s
        }
    }
    if (!PLC.connected) {
        createLog(
            "error",
            "zechitti1",
            "ocp",
            "Max reconnect attempts reached. Stopping retries."
        );
        reconnecting = false;
        // optional: exit process or alert someone here
        // process.exit(1);
    }
}

View File

@@ -24,6 +24,9 @@ import { deleteLabels } from "../../globalUtils/dbCleanUp/labelCleanUp.js";
import bookInLabel from "./routes/labeling/bookIn.js";
import labelRatio from "./routes/labeling/getLabelRatio.js";
import resetRatio from "./routes/labeling/resetLabelRatio.js";
import materialTransferLot from "./routes/materials/lotTransfer.js";
import pendingTransfers from "./routes/materials/currentPending.js";
import { zechitti1Connect } from "./controller/specialProcesses/zechettis/zechetti1.js";
const app = new OpenAPIHono();
@@ -47,6 +50,9 @@ const routes = [
//dyco
dycoCon,
dycoClose,
// materials
materialTransferLot,
pendingTransfers,
] as const;
const setting = await db.select().from(settings);
@@ -66,6 +72,7 @@ app.all("/ocp/*", (c) => {
*/
const dycoActive = setting.filter((n) => n.name == "dycoConnect");
const ocpActive = setting.filter((n) => n.name === "ocpActive");
const zechetti = setting.filter((n) => n.name == "zechetti");
// run the printer update on restart just to keep everything good
// do the intnal connection to the dyco
@@ -75,6 +82,13 @@ setTimeout(() => {
}
}, 3 * 1000);
// if zechetti plc is wanted we will connect
setTimeout(() => {
if (zechetti[0]?.value === "1") {
zechitti1Connect();
}
}, 3 * 1000);
// check for printers being assigned
setInterval(() => {
if (ocpActive[0]?.value === "1") {
@@ -101,4 +115,5 @@ setInterval(() => {
setInterval(() => {
updatePrinters();
}, 1000 * 60 * 60 * 24);
export default app;

View File

@@ -0,0 +1,51 @@
// Route: GET /pendingtransfers — reports material transfers that are queued
// in `pendingJobs` but have not fired yet, with their scheduled time and
// human-readable remaining duration.
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { apiHit } from "../../../../globalUtils/apiHits.js";
import { pendingJobs } from "../../controller/materials/lotTransfer.js";
import { format, formatDuration, intervalToDuration } from "date-fns";
const app = new OpenAPIHono({ strict: false });
app.openapi(
    createRoute({
        tags: ["ocp"],
        summary: "Returns pending transfers",
        method: "get",
        path: "/pendingtransfers",
        responses: responses(),
    }),
    async (c) => {
        // Record the API hit for usage tracking.
        apiHit(c, { endpoint: "/pendingtransfers" });
        // Flatten the pendingJobs Map into a serializable summary per job.
        const pending = Array.from(pendingJobs.entries()).map(
            ([runningNumber, job]) => {
                // Time left until the scheduled setTimeout fires.
                const duration = intervalToDuration({
                    start: new Date(),
                    end: job.scheduledFor,
                });
                return {
                    runningNumber,
                    lot: job.data?.lotNumber,
                    newQty: job.newQty,
                    consumeLot: job.consumeLot,
                    scheduledFor: format(job.scheduledFor, "M/d/yyyy HH:mm"),
                    // NOTE(review): despite the name, this is a formatted
                    // duration string (e.g. "5 minutes"), not milliseconds.
                    remainingMs: formatDuration(duration, {
                        format: ["hours", "minutes", "seconds"],
                    }),
                };
            }
        );
        // NOTE(review): `data: [pending]` nests the array one level deep —
        // confirm consumers expect data[0] to be the list before flattening.
        return c.json({
            success: true,
            message: "Current Pending trnasfers",
            data: [pending],
        });
    }
);
export default app;

View File

@@ -0,0 +1,66 @@
// Route: POST /materiallottransfer — accepts a gaylord transfer request and
// hands it to lotMaterialTransfer, which schedules the actual consume call.
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { apiHit } from "../../../../globalUtils/apiHits.js";
import { lotMaterialTransfer } from "../../controller/materials/lotTransfer.js";
const app = new OpenAPIHono({ strict: false });
// Request body: identifies the source gaylord (runningNumber), the target
// lot, the original quantity, and the fill level as a fraction (0-1).
const LotTransfer = z.object({
    runningNumber: z.number().openapi({ example: 1234 }),
    lotNumber: z.number().openapi({ example: 1235 }),
    originalAmount: z.number().openapi({ example: 457 }),
    level: z.number().openapi({ examples: [0.24, 0.5, 0.75, 0.95] }),
});
app.openapi(
    createRoute({
        tags: ["ocp"],
        summary: "Transfers a gaylord of material to provided lot",
        method: "post",
        path: "/materiallottransfer",
        request: {
            body: { content: { "application/json": { schema: LotTransfer } } },
        },
        responses: responses(),
    }),
    async (c) => {
        //const hours = c.req.query("hours");
        // Parse the JSON body; tryCatch converts a parse failure into `error`.
        const { data: bodyData, error: bodyError } = await tryCatch(
            c.req.json()
        );
        // Record the API hit (and the body, for troubleshooting).
        apiHit(c, { endpoint: "/materiallottransfer", lastBody: bodyData });
        if (bodyError) {
            // NOTE(review): this branch omits the `data` key that the other
            // responses include — confirm consumers tolerate its absence.
            return c.json({
                success: false,
                message: "You are missing data",
            });
        }
        // Schedule the transfer; the controller returns a result object.
        const { data: transferMaterial, error: transferError } = await tryCatch(
            lotMaterialTransfer(bodyData)
        );
        if (transferError) {
            //console.log(transferError);
            return c.json({
                success: false,
                message:
                    "There was an error transfering the material to the next lot.",
                data: transferError,
            });
        }
        console.log(transferMaterial);
        // Pass the controller's result straight through to the caller.
        return c.json({
            success: transferMaterial?.success,
            message: transferMaterial?.message,
            data: transferMaterial?.data,
        });
    }
);
export default app;

View File

@@ -50,6 +50,20 @@ const newProdRoles: any = [
},
// logistics
{
name: "planner",
description: "Planning role.",
roles: [
"Administration\\Scan\\ApiConsumer",
"Administration\\Printing\\ApiConsumer",
"Logistics\\Warehousing\\ProcessAdmin",
"Manufacturing\\IssueMaterial\\ProcessAdmin",
"Manufacturing\\ProductionLabelling\\ProcessAdmin",
"DemandManagement\\Forecast\\ProcessAdmin",
"DemandManagement\\Order\\ProcessAdmin",
],
rolesLegacy: [55, 95, 15, 105, 145, 9],
},
// plant manager
{
name: "plantManager",

View File

@@ -43,9 +43,9 @@ export const wrapperStuff = async (tagData: any) => {
"rfid",
`There are ${tagData.length} tags and this ${
tagData[0].reader
} only allows 1 tag to create a label: tag ${tagData[0].tag}, ${
tagData[1].tag
}, ${tagData.length === 3 && tagData[2].tag}`
} only allows 1 tag to create a label: tag ${tagData
.map((o: any) => `${o.tag}`)
.join(",\n ")}`
);
const tag = { ...tagData[0], runningNr: 0 };
//tagStuff([tag]);

View File

@@ -7,6 +7,7 @@ import updateReader from "./route/updateReader.js";
import manualTrigger from "./route/manualTagRead.js";
import getReaders from "./route/getReaders.js";
import resetRatio from "./route/resetRatio.js";
import { monitorRfidTags } from "./utils/monitorTags.js";
const app = new OpenAPIHono();
const routes = [
@@ -24,4 +25,9 @@ const appRoutes = routes.forEach((route) => {
app.route("/rfid", route);
});
// monitor every 5 min for tags older than 6 hours and clear the line they were on, so we reduce the risk of them being labeled with the wrong info
setInterval(() => {
monitorRfidTags();
}, 5 * 1000 * 60);
export default app;

View File

@@ -0,0 +1,22 @@
import { and, lt, ne, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { rfidTags } from "../../../../database/schema/rfidTags.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
/**
 * This will monitor tags that are older than 6 hours and are still linked to a line.
 * It will then remove the line from the last area in, as we will assume it does not exist.
 */
/**
 * Cleanup job: flags RFID tags older than 6 hours that are still linked to a
 * line by overwriting lastareaIn with "miss scanned", so a stale tag cannot
 * get a label with the wrong line info. Tags already at "wrapper1" are left
 * alone. Intended to be run on an interval (see the rfid index module).
 */
export const monitorRfidTags = async () => {
    const { error } = await tryCatch(
        db
            .update(rfidTags)
            .set({ lastareaIn: "miss scanned" })
            .where(
                and(
                    ne(rfidTags.lastareaIn, "wrapper1"), // not equal to 'wrapper1'
                    lt(rfidTags.lastRead, sql`NOW() - INTERVAL '6 hours'`) // older than 6 hours
                )
            )
    );
    // Previously the error was silently discarded; surface it so a failing
    // cleanup run is visible instead of rotting quietly.
    if (error) {
        console.error(`monitorRfidTags failed: ${error}`);
    }
};

View File

@@ -1,17 +1,57 @@
import {db} from "../../../../../database/dbclient.js";
import {settings} from "../../../../../database/schema/settings.js";
import {createLog} from "../../../logger/logger.js";
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
import axios from "axios";
import { db } from "../../../../../database/dbclient.js";
import { settings } from "../../../../../database/schema/settings.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import type { Settings } from "../../../../types/settings.js";
export let serverSettings: Settings[];
export const getSettings = async () => {
createLog("info", "lst", "server", "Settings are being grabbed");
let serverSettings;
const settingsType = process.env.LST_USE_GO;
createLog(
"info",
"lst",
"server",
`Settings are being grabbed from: ${
settingsType === "true" ? "Go backend" : "Localbackend"
}`
);
try {
serverSettings = await db.select().from(settings);
//.where(sql`${userRole} = ANY(roles)`);
} catch (error) {
createLog("error", "lst", "server", "There was an error getting the settings");
throw new Error("There was an error getting the settings");
const baseUrl = process.env.LST_BASE_URL;
if (settingsType === "true") {
const { data, error } = (await tryCatch(
axios.get(`${baseUrl}/api/v1/settings`)
)) as any;
if (error) {
createLog(
"error",
"lst",
"server",
"There was an error getting the settings"
);
throw new Error("There was an error getting the settings");
}
serverSettings = data.data.data;
}
if (settingsType !== "true") {
try {
serverSettings = (await db.select().from(settings)) as any;
//.where(sql`${userRole} = ANY(roles)`);
} catch (error) {
createLog(
"error",
"lst",
"server",
"There was an error getting the settings"
);
throw new Error("There was an error getting the settings");
}
}
return serverSettings;
};

View File

@@ -198,6 +198,13 @@ const newSettings = [
serviceBelowsTo: "system",
roleToChange: "admin",
},
{
name: "rifd",
value: `0`,
description: "This is for dayton to be runnning rfid pallet tracking.",
serviceBelowsTo: "logistics",
roleToChange: "admin",
},
// ocp
{
@@ -244,7 +251,28 @@ const newSettings = [
serviceBelowsTo: "logistics",
roleToChange: "admin",
},
{
name: "zechetti",
value: `0`,
description: "Are we going to be running the Zechetti plcs",
serviceBelowsTo: "logistics",
roleToChange: "admin",
},
// temp settings can be deleted at a later date once that code is removed
{
name: "checkColor",
value: `0`,
description: "Checks autoconsume and manual consume color",
serviceBelowsTo: "admin",
roleToChange: "admin",
},
{
name: "checkPKG",
value: `0`,
description: "Checks checks if we have enough packaging or not",
serviceBelowsTo: "admin",
roleToChange: "admin",
},
{
name: "siloAdjMigrations",
value: `0`,

View File

@@ -61,7 +61,14 @@ const newSubModules = [
description: "",
link: "/materialHelper/consumption",
icon: "Package",
roles: ["technician", "supervisor", "manager", "admin", "systemAdmin"],
roles: [
"viewer",
"technician",
"supervisor",
"manager",
"admin",
"systemAdmin",
],
active: false,
subSubModule: [],
},

View File

@@ -6,6 +6,7 @@ import { settings } from "../../../database/schema/settings.js";
import { eq } from "drizzle-orm";
import { installed } from "../../index.js";
import { checkHostnamePort } from "../../globalUtils/pingServer.js";
import { serverSettings } from "../server/controller/settings/getSettings.js";
let pool: any;
let connected: boolean = false;
@@ -23,10 +24,16 @@ export const initializeProdPool = async () => {
return { success: false, message: "The server is not installed." };
}
const dbServer = await db
.select()
.from(settings)
.where(eq(settings.name, "dbServer"));
// const dbServer = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "dbServer"));
// the move to the go version for settings
const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer"
) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`);
if (!serverUp) {
@@ -44,10 +51,14 @@ export const initializeProdPool = async () => {
}
// make sure the server is not set to localhost this will prevent some weird issues later but can be localhost on the dev
const serverLoc = await db
.select()
.from(settings)
.where(eq(settings.name, "dbServer"));
// const serverLoc = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "dbServer"));
const serverLoc = serverSettings.filter(
(n: any) => n.name === "dbServer"
) as any;
if (
serverLoc[0].value === "localhost" &&
process.env.NODE_ENV !== "development"
@@ -134,10 +145,14 @@ export async function query(queryToRun: string, name: string) {
/**
* Just an extra catch incase someone tried to run a query while we were not connected to the server or sql server
*/
const dbServer = await db
.select()
.from(settings)
.where(eq(settings.name, "dbServer"));
// const dbServer = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "dbServer"));
const dbServer = serverSettings.filter(
(n: any) => n.name === "dbServer"
) as any;
const serverUp = await checkHostnamePort(`${dbServer[0].value}:1433`);
if (!serverUp) {
@@ -170,10 +185,13 @@ export async function query(queryToRun: string, name: string) {
/**
* We no longer need to send over the plant token change as we do it inside the query function.
*/
const plantToken = await db
.select()
.from(settings)
.where(eq(settings.name, "plantToken"));
// const plantToken = await db
// .select()
// .from(settings)
// .where(eq(settings.name, "plantToken"));
const plantToken = serverSettings.filter(
(n: any) => n.name === "plantToken"
) as any;
const query = queryToRun.replaceAll("test1", plantToken[0].value);
try {

View File

@@ -1,4 +1,6 @@
export const activeArticle = `
use AlplaPROD_test1
SELECT V_Artikel.IdArtikelvarianten,
V_Artikel.Bezeichnung,
V_Artikel.ArtikelvariantenTypBez,
@@ -38,7 +40,17 @@ V_Artikel.ArtikelvariantenTypBez = 'Glue' or
V_Artikel.ArtikelvariantenTypBez = 'Top Frame' or
V_Artikel.ArtikelvariantenTypBez = 'IML Label' or
V_Artikel.ArtikelvariantenTypBez = 'Purch EBM Bottle' or
V_Artikel.ArtikelvariantenTypBez = 'Purch Spout'
V_Artikel.ArtikelvariantenTypBez = 'Purchased Spout' or
V_Artikel.ArtikelvariantenTypBez = 'Gaylord' or
V_Artikel.ArtikelvariantenTypBez = 'Misc. Packaging' or
V_Artikel.ArtikelvariantenTypBez = 'Sleeve' or
V_Artikel.ArtikelvariantenTypBez = 'Plastic Bag' or
V_Artikel.ArtikelvariantenTypBez = 'Purch Spout' or
V_Artikel.ArtikelvariantenTypBez = 'Seal' or
V_Artikel.ArtikelvariantenTypBez = 'Tape' or
V_Artikel.ArtikelvariantenTypBez = 'Box' or
V_Artikel.ArtikelvariantenTypBez = 'Label IML' or
V_Artikel.ArtikelvariantenTypBez = 'Pallet Runner'
THEN 'PKG'
WHEN V_Artikel.ArtikelvariantenTypBez='HD-PE' or
V_Artikel.ArtikelvariantenTypBez='HD-PE PCR' or
@@ -72,7 +84,8 @@ V_Artikel.ArtikelvariantenTypBez = 'Purchased Caps' or
V_Artikel.ArtikelvariantenTypBez = 'Purchased_preform'
THEN 'Purchased_preform'
When
V_Artikel.ArtikelvariantenTypBez = 'Closures'
V_Artikel.ArtikelvariantenTypBez = 'Closures' or
V_Artikel.ArtikelvariantenTypBez = 'Cap'
THEN 'Caps'
When
V_Artikel.ArtikelvariantenTypBez = 'Dummy'
@@ -90,7 +103,8 @@ V_Artikel.ProdBereichBez = 'IM-Caps' or
V_Artikel.ProdBereichBez = 'IM-PET' or
V_Artikel.ProdBereichBez = 'PRINT OFFICE' or
V_Artikel.ProdBereichBez = 'EBM' or
V_Artikel.ProdBereichBez = 'ISBM'
V_Artikel.ProdBereichBez = 'ISBM' or
V_Artikel.ProdBereichBez = 'IM-Finishing'
Then 'FG'
Else 'not Defined Profit Center'
end,
@@ -102,16 +116,17 @@ sales.[KdArtBez] as CustomerArticleDescription,
round(V_Artikel.Zyklus, 2) as CycleTime,
Sypronummer as salesAgreement,
V_Artikel.ProdArtikelBez as ProductFamily
,REPLACE(pur.UOM,'UOM:','') as UOM
--,*
FROM AlplaPROD_test1.dbo.V_Artikel (nolock)
FROM dbo.V_Artikel (nolock)
join
AlplaPROD_test1.dbo.V_Artikelvarianten on AlplaPROD_test1.dbo.V_Artikel.IdArtikelvarianten =
AlplaPROD_test1.dbo.V_Artikelvarianten.IdArtikelvarianten
dbo.V_Artikelvarianten (nolock) on dbo.V_Artikel.IdArtikelvarianten =
dbo.V_Artikelvarianten.IdArtikelvarianten
join
AlplaPROD_test1.dbo.V_FibuKonten_BASIS on AlplaPROD_test1.dbo.V_Artikelvarianten.IdFibuKonto =
AlplaPROD_test1.dbo.V_FibuKonten_BASIS.IdFibuKonto
dbo.V_FibuKonten_BASIS (nolock) on dbo.V_Artikelvarianten.IdFibuKonto =
dbo.V_FibuKonten_BASIS.IdFibuKonto
-- adding in the sales price
@@ -125,7 +140,7 @@ left join
,[KdArtNr]
,[KdArtBez]
--,*
from AlplaPROD_test1.dbo.T_HistoryVK (nolock)
from dbo.T_HistoryVK (nolock)
where
--GueltigabDatum > getDate() - 120
--and
@@ -133,7 +148,26 @@ left join
and StandardKunde = 1 -- default address
) a
where RN = 1) as sales
on AlplaPROD_test1.dbo.V_Artikel.IdArtikelvarianten = sales.av
on dbo.V_Artikel.IdArtikelvarianten = sales.av
/* adding the purchase price info */
left join
(select * from
(select
ROW_NUMBER() OVER (PARTITION BY IdArtikelvarianten ORDER BY GueltigabDatum DESC) AS RN,
IdArtikelvarianten as av
,GueltigabDatum as validDate
,EKPreis as price
,LiefArtNr as supplierNr
,case when Bemerkung is not null and Bemerkung like '%UOM:%' then LEFT(Bemerkung, CHARINDEX(' ', Bemerkung)) else 'UOM:1' end as UOM
,Bemerkung
--,*
from dbo.T_HistoryEK (nolock)
where
StandardLieferant = 1 -- default address
) a
where RN = 1) as pur
on dbo.V_Artikel.IdArtikelvarianten = pur.av
where V_Artikel.aktiv = 1

Some files were not shown because too many files have changed in this diff Show More