Compare commits

...

163 Commits

Author SHA1 Message Date
d27611d035 feat(servers): all servers in v2 meow 2025-04-02 21:26:32 -05:00
f771db6034 fix(ocp): delivery changes to happen after bookin is done 2025-04-02 21:26:15 -05:00
4f3b5d75a2 refactor(ocme): crashing for no reason added ?? 2025-04-02 21:25:49 -05:00
a1f62a3e51 fix(ti intergraion): chagnes to the special instructions 2025-04-02 21:25:30 -05:00
de0ee3a61c fix(sendmail): if server not installed just stop 2025-04-02 21:25:07 -05:00
b48dd8fa15 style(auth): format changes to the new config only 2025-04-02 21:24:51 -05:00
3355eb389c test(services): testing remove restart and stop 2025-04-02 21:24:23 -05:00
b2683d0429 feat(datamart): total inv migrated over 2025-04-02 21:23:42 -05:00
7e484a0f90 chore(release): bump build number to 147 2025-04-02 21:02:05 -05:00
77abaed60e chore(release): bump build number to 146 2025-04-02 16:38:47 -05:00
d10770bc49 chore(release): bump build number to 145 2025-04-02 15:29:10 -05:00
1fee4b719b chore(release): bump build number to 144 2025-04-02 14:58:43 -05:00
1dce3dccdc chore(release): bump build number to 143 2025-04-02 14:50:28 -05:00
3babf8a749 chore(release): bump build number to 142 2025-04-02 13:17:42 -05:00
29c9f2d1be chore(release): bump build number to 141 2025-04-02 12:17:34 -05:00
459b0f287c chore(release): bump build number to 140 2025-04-02 08:45:48 -05:00
7535e15337 chore(release): bump build number to 139 2025-04-02 08:15:43 -05:00
68dac0dd28 chore(release): bump build number to 138 2025-04-02 07:56:22 -05:00
f442cedff2 chore(release): bump build number to 137 2025-04-02 07:51:35 -05:00
63e005b790 feat(servers): added in jci lima sherman 2025-04-01 18:50:35 -05:00
814861e59c feat(ocp): added dual printing back in 2025-04-01 18:50:03 -05:00
dd62ceb133 fix(updater): fixed the iowa2 server so it has the correct port 2025-04-01 18:49:48 -05:00
e335225807 build(update server): added cheaters in to get rid of the other service to readd it 2025-04-01 18:49:27 -05:00
3e54685a7e chore(release): bump build number to 136 2025-04-01 16:30:06 -05:00
f481197d6e feat(new server): added mcd to the server setup 2025-04-01 16:23:13 -05:00
053c05c1a7 fix(perms): fixed the location for the test update 2025-04-01 16:22:52 -05:00
8bdbc4995c refactor(ocp): more work on perfecting dyco and labeling 2025-04-01 16:22:21 -05:00
bc2336e46d refactor(compile): changes to keep the last 20 builds 2025-04-01 16:22:01 -05:00
544e82c01d fix(update server missing migrate): missing 2025-04-01 16:21:15 -05:00
44507d41c4 feat(datamart): initial get active query migrated 2025-04-01 16:20:18 -05:00
ee3026fa7c fix(ocme): cycle count typos 2025-04-01 16:19:25 -05:00
5c642805b1 feat(notifications): migrated all from v1 2025-04-01 16:18:48 -05:00
0d06dae6de chore(release): bump build number to 135 2025-04-01 13:37:46 -05:00
8a639ceaf8 chore(release): bump build number to 134 2025-04-01 10:14:50 -05:00
8c6dc5f690 chore(release): bump build number to 133 2025-04-01 09:50:03 -05:00
16b39fd386 chore(release): bump build number to 132 2025-03-31 20:36:09 -05:00
075bba95ee chore(release): bump build number to 131 2025-03-31 20:33:34 -05:00
6ec9f5827c chore(release): bump build number to 130 2025-03-31 10:23:34 -05:00
93941723cc chore(release): bump build number to 129 2025-03-31 10:18:08 -05:00
fb41fb1dd1 chore(release): bump build number to 128 2025-03-30 18:51:23 -05:00
6d5bcde88e chore(release): bump build number to 127 2025-03-30 18:48:18 -05:00
1156bbd223 chore(release): bump build number to 126 2025-03-30 10:13:01 -05:00
bfd4aec4eb chore(release): 2.11.0 2025-03-30 10:12:17 -05:00
63b1151cb7 fix(misc): work on ocp to improve the errors that were missed and better logging 2025-03-30 10:11:58 -05:00
a5dee58223 refactor(server): removed console logs 2025-03-30 10:11:29 -05:00
9784072aab refactor(format changes): changes to the file formats to match across computers 2025-03-30 10:10:53 -05:00
f9096b54f5 fix(admin auth): added in role change for v1 2025-03-30 10:10:01 -05:00
11e5cf4d86 refactor(lst): added huston backin 2025-03-30 10:09:32 -05:00
a647d05d3b chore(release): bump build number to 125 2025-03-30 10:03:20 -05:00
098c477119 chore(release): bump build number to 124 2025-03-30 09:44:19 -05:00
3228ad7892 feat(lst): script added for the test server to readd the permissions once it boots up 2025-03-30 08:41:55 -05:00
c27ad7cf6a refactor(lst): added in a removal of old files so we can keep the server clean 2025-03-30 08:41:26 -05:00
a48d4bd5af feat(auth): admin user updates added
if a password change happens then an email will be sent to the user.
2025-03-30 08:40:49 -05:00
09c0825194 chore(release): bump build number to 123 2025-03-30 05:09:52 -05:00
28fbf2c1e4 chore(release): bump build number to 122 2025-03-29 18:26:57 -05:00
af5dc9266f chore(release): bump build number to 121 2025-03-29 12:16:23 -05:00
b903c8ee2d chore(release): bump build number to 120 2025-03-29 12:15:02 -05:00
035eda9aa8 chore(release): bump build number to 119 2025-03-29 12:13:16 -05:00
1f7c33d871 chore(release): bump build number to 118 2025-03-29 07:54:44 -05:00
d046c4ea41 chore(release): bump build number to 117 2025-03-29 07:39:34 -05:00
88258baf9d chore(release): bump build number to 116 2025-03-28 14:50:34 -05:00
82acfcc4a9 chore(release): bump build number to 116 2025-03-28 14:20:37 -05:00
ba3d721940 refactor(ocp): lots of work for rfid and dyco contorl 2025-03-27 21:12:22 -05:00
27d6b6e884 refactor(ocme): added new error in posting where we know when the pallet is not instock 2025-03-27 21:11:42 -05:00
2c8d1fb710 fix(logger): changes to get the most recent verse aged 2025-03-27 21:10:57 -05:00
b5de6445b3 feat(rfid): work on the readers and there functions 2025-03-27 21:08:05 -05:00
f9f68ce969 refactor(logger): changed log level to be in the env file vs hardcoded 2025-03-27 21:07:15 -05:00
0ced135ec3 feat(updater): added in a delete function for the server side 2025-03-27 21:06:45 -05:00
7b1c6e1361 refactor(ocp): finished the dashboard and move logs and labels to a tab style 2025-03-27 21:06:25 -05:00
e3ba45ae13 fix(ocme): fixed to make sure we can always just update a runnning nunmber 2025-03-27 21:05:49 -05:00
ac7859fda3 chore(release): bump build number to 115 2025-03-27 20:21:24 -05:00
fb31ae79d1 chore(release): bump build number to 114 2025-03-27 18:50:04 -05:00
ff1dfdde68 chore(release): bump build number to 113 2025-03-27 18:44:11 -05:00
f0b9bd599a chore(release): bump build number to 112 2025-03-27 18:42:43 -05:00
f3103d8c1a chore(release): bump build number to 111 2025-03-27 18:22:13 -05:00
d557728fa2 chore(release): bump build number to 110 2025-03-27 17:26:30 -05:00
d58cb5286e chore(release): bump build number to 109 2025-03-27 17:03:39 -05:00
7d4733896e chore(release): bump build number to 108 2025-03-27 16:48:14 -05:00
175c7226ed chore(release): bump build number to 107 2025-03-27 15:00:51 -05:00
c32547ceb8 chore(release): bump build number to 106 2025-03-27 14:47:42 -05:00
a01c0566c2 chore(release): bump build number to 105 2025-03-27 14:46:43 -05:00
ca4106945b chore(release): bump build number to 104 2025-03-27 14:45:41 -05:00
1386e0f00f chore(release): bump build number to 103 2025-03-27 14:44:10 -05:00
290f20b86b chore(release): bump build number to 102 2025-03-27 14:33:34 -05:00
52171c87fc chore(release): bump build number to 101 2025-03-27 14:31:22 -05:00
c474536992 chore(release): bump build number to 100 2025-03-27 14:07:33 -05:00
600a989226 chore(release): bump build number to 99 2025-03-27 11:55:35 -05:00
ea7801fccf chore(release): bump build number to 98 2025-03-27 10:59:53 -05:00
c03b61f48a chore(release): bump build number to 97 2025-03-27 10:33:59 -05:00
ca552d5587 chore(release): bump build number to 96 2025-03-27 10:12:23 -05:00
278c5538bc chore(release): bump build number to 95 2025-03-27 09:21:15 -05:00
f39ae0f590 chore(release): bump build number to 94 2025-03-27 09:10:01 -05:00
b4e0f4c666 chore(release): bump build number to 93 2025-03-26 22:12:03 -05:00
db66302415 chore(release): 2.10.0 2025-03-26 22:11:27 -05:00
90e9bb0ff6 refactor(rfid): refactored the way the wrapper works to indlude backup dyco plan 2025-03-26 22:10:39 -05:00
bd11feb136 refactor(rfid): refactored station 3 (lines) and complete logic 2025-03-26 22:10:05 -05:00
96e7f742fe refactor(ocme): removed some info logs as ocme calls alot 2025-03-26 22:09:23 -05:00
eb051d51f2 feat(notify): intial nofity system added to monitor crashes and rfid wrapper 2025-03-26 22:08:53 -05:00
7a1a4773e7 feat(ocp): create and book in plus dyco controller implemented 2025-03-26 22:07:19 -05:00
878e650e62 chore(release): bump build number to 92 2025-03-26 22:05:21 -05:00
a31e7ea163 chore(release): bump build number to 91 2025-03-26 16:17:53 -05:00
04aa943920 chore(release): bump build number to 90 2025-03-26 16:00:56 -05:00
af076b8e27 chore(release): bump build number to 89 2025-03-26 15:39:57 -05:00
c0a0589b3c chore(release): bump build number to 88 2025-03-26 15:19:55 -05:00
509ef84726 chore(release): bump build number to 87 2025-03-26 15:18:53 -05:00
5ab813f378 chore(release): bump build number to 86 2025-03-26 12:44:17 -05:00
5d61eb879e chore(release): bump build number to 85 2025-03-26 12:42:58 -05:00
2d4b1db5f4 fix(logistics): correction to the lane grab 2025-03-26 09:16:16 -05:00
58f7b4322d feat(logistics): added in return material by lane name and gets lane id 2025-03-26 08:36:47 -05:00
3b5e82fdc1 feat(ocp): added wrappercard into the mix 2025-03-25 18:57:00 -05:00
f9cd3fb881 feat(ocp): prodlink check added 2025-03-25 18:56:36 -05:00
51267f5202 fix(ocme): fixed the camera routes 2025-03-25 18:55:57 -05:00
ceaa25f31e feat(ocp): added labeling logs in 2025-03-25 18:55:23 -05:00
416254353c chore(release): bump build number to 84 2025-03-25 18:21:47 -05:00
7f946c095b chore(release): bump build number to 83 2025-03-25 18:03:32 -05:00
c2aa69ab0a chore(release): bump build number to 82 2025-03-25 18:02:14 -05:00
250988800e chore(release): bump build number to 81 2025-03-25 17:57:13 -05:00
cacfd6d2e0 chore(release): bump build number to 80 2025-03-25 17:49:07 -05:00
8d2721b0c2 chore(release): bump build number to 79 2025-03-25 17:44:41 -05:00
36cdc7b4bf chore(release): bump build number to 78 2025-03-25 17:39:26 -05:00
4d4d6fc7a4 chore(release): bump build number to 77 2025-03-25 15:48:28 -05:00
030f9f9aac refactor(ocp): moved printers to there own folder to keep everything clean 2025-03-25 13:39:39 -05:00
9e9a56cbb1 feat(ocp): add lots with scroll view was added 2025-03-25 13:39:18 -05:00
b01980e1c5 fix(ocme): fixed some import errors 2025-03-25 12:48:18 -05:00
fe0c500dcf feat(server): clearn up code 2025-03-25 12:47:52 -05:00
8a040d15db fix(servers): fixed the weird conflict import that happened 2025-03-25 12:47:36 -05:00
f90066c090 feat(ocp): added in printers get and add 2025-03-25 12:47:15 -05:00
04eb2e3e14 refactor(tcpserver): just the tcp server doing something 2025-03-25 07:58:37 -05:00
90fb0d364d Merge branch 'main' of https://git.tuffraid.net/cowch/lstV2 2025-03-25 07:57:55 -05:00
e6e1cecce3 refactor(ocme): corrections to endpoints to work with ocnme as intneeded 2025-03-25 07:54:58 -05:00
73aa95a693 refactor(ocme): cleaup on pickedup 2025-03-25 07:54:22 -05:00
b9f19095cb refactor(ocme): clean up on the getInfo endpoint 2025-03-25 07:54:00 -05:00
dcb56d4274 fix(ocme): corrections to posting data for the response was added 2025-03-25 07:53:30 -05:00
bc1821132e feat(ocme): manual camera trigger for the wrapper added 2025-03-25 07:52:40 -05:00
2551d6c907 refactor(updateserver): removed ocme from starting back up as it was migrated 2025-03-25 07:41:10 -05:00
adf0880659 refactor(server): changed to log only when in dev, and removed the redirect of the url 2025-03-25 07:40:15 -05:00
5149de3199 ci(lst): changes made to the settings file to work across all pvs 2025-03-25 07:39:32 -05:00
c71b514d9a refactor(frontend): prettier change to formatting 2025-03-25 07:27:45 -05:00
9254e52808 chore(release): bump build number to 76 2025-03-25 06:17:31 -05:00
b8c028a6c1 chore(release): bump build number to 75 2025-03-24 20:20:55 -05:00
529e922485 chore(release): bump build number to 74 2025-03-24 20:13:07 -05:00
5201012235 chore(release): bump build number to 73 2025-03-24 20:08:59 -05:00
abe53b8f5d chore(release): bump build number to 72 2025-03-24 20:06:15 -05:00
836f3e294b chore(release): bump build number to 71 2025-03-24 19:56:03 -05:00
96abef762b chore(release): bump build number to 70 2025-03-24 19:40:39 -05:00
2c227f9428 chore(release): bump build number to 69 2025-03-24 18:33:12 -05:00
46647687dc chore(release): bump build number to 68 2025-03-24 18:23:24 -05:00
cb01ef1af1 chore(release): bump build number to 67 2025-03-24 17:16:23 -05:00
b3b5fcec65 chore(release): bump build number to 66 2025-03-24 17:14:25 -05:00
3a4dc47a36 chore(release): bump build number to 65 2025-03-24 17:06:23 -05:00
63177b523e chore(release): bump build number to 64 2025-03-24 17:00:20 -05:00
e1cad027d2 chore(release): bump build number to 63 2025-03-24 16:42:53 -05:00
c1cc355f4f chore(release): bump build number to 62 2025-03-24 16:40:50 -05:00
5ed67f3fc0 chore(release): bump build number to 61 2025-03-24 16:28:32 -05:00
57e82d2360 chore(release): bump build number to 60 2025-03-24 16:26:04 -05:00
9395ec6cd4 chore(release): bump build number to 59 2025-03-24 15:50:29 -05:00
0475bb30f9 chore(release): bump build number to 58 2025-03-24 15:49:37 -05:00
6843368c36 chore(release): bump build number to 57 2025-03-24 15:47:16 -05:00
335ea2deca chore(release): bump build number to 56 2025-03-24 15:39:33 -05:00
96814c1115 Merge branch 'main' of https://git.tuffraid.net/cowch/lstV2 2025-03-24 15:31:35 -05:00
6dd9ed848b test(ocme): lots of changes to get it working in production 2025-03-24 15:31:31 -05:00
0c5fc1dfb0 chore(release): bump build number to 55 2025-03-24 15:28:10 -05:00
5886bea85d chore(release): bump build number to 52 2025-03-24 11:43:52 -05:00
184 changed files with 29736 additions and 10298 deletions

View File

@@ -1,6 +1,8 @@
{ {
"editor.defaultFormatter": "esbenp.prettier-vscode", "editor.defaultFormatter": "esbenp.prettier-vscode",
"workbench.colorTheme": "Default Dark+",
"prettier.tabWidth": 4,
"terminal.integrated.env.windows": {},
"editor.formatOnSave": true, "editor.formatOnSave": true,
"[javascript]": { "[javascript]": {
"editor.formatOnSave": true "editor.formatOnSave": true

View File

@@ -1,5 +1,168 @@
# All CHanges to LST can be found below. # All CHanges to LST can be found below.
## [2.11.0](https://git.tuffraid.net/cowch/lstV2/compare/v2.10.0...v2.11.0) (2025-03-30)
### 🌟 Enhancements
* **auth:** admin user updates added ([a48d4bd](https://git.tuffraid.net/cowch/lstV2/commits/a48d4bd5afb53d9242a8ba13fef5dab2e2d0e4fe))
* **lst:** script added for the test server to readd the permissions once it boots up ([3228ad7](https://git.tuffraid.net/cowch/lstV2/commits/3228ad789206a2fd477060fa6849850af52523b1))
* **rfid:** work on the readers and there functions ([b5de644](https://git.tuffraid.net/cowch/lstV2/commits/b5de6445b382e7f7d7c8ce5893138a1a86a56eb2))
* **updater:** added in a delete function for the server side ([0ced135](https://git.tuffraid.net/cowch/lstV2/commits/0ced135ec38590fd599e9a45542768b3790eb3db))
### 📝 Chore
* **release:** bump build number to 100 ([c474536](https://git.tuffraid.net/cowch/lstV2/commits/c474536992659b7ca789a5841441f1634de7a6dc))
* **release:** bump build number to 101 ([52171c8](https://git.tuffraid.net/cowch/lstV2/commits/52171c87fc4b5f15d964e6695dd4d9c2710ab961))
* **release:** bump build number to 102 ([290f20b](https://git.tuffraid.net/cowch/lstV2/commits/290f20b86b81fa82458c0317215dd614f3de73c2))
* **release:** bump build number to 103 ([1386e0f](https://git.tuffraid.net/cowch/lstV2/commits/1386e0f00f54a54f5b7645f2c1c23ebdd3228e59))
* **release:** bump build number to 104 ([ca41069](https://git.tuffraid.net/cowch/lstV2/commits/ca4106945b2c09ee495821c8c2452b5254e4df37))
* **release:** bump build number to 105 ([a01c056](https://git.tuffraid.net/cowch/lstV2/commits/a01c0566c2413e2ba68106e26eb9dc09c415ba92))
* **release:** bump build number to 106 ([c32547c](https://git.tuffraid.net/cowch/lstV2/commits/c32547ceb831c510797da8c3d99745bd2a843a1b))
* **release:** bump build number to 107 ([175c722](https://git.tuffraid.net/cowch/lstV2/commits/175c7226eda3cdd3d63febf6e058171c72d0cb5e))
* **release:** bump build number to 108 ([7d47338](https://git.tuffraid.net/cowch/lstV2/commits/7d4733896e5f9578c60133cb5c871139e1ac332c))
* **release:** bump build number to 109 ([d58cb52](https://git.tuffraid.net/cowch/lstV2/commits/d58cb5286e85f49e327134f2b957c56fdd1a319a))
* **release:** bump build number to 110 ([d557728](https://git.tuffraid.net/cowch/lstV2/commits/d557728fa25c33a95ae99559ddc4ce7b2f7a0cad))
* **release:** bump build number to 111 ([f3103d8](https://git.tuffraid.net/cowch/lstV2/commits/f3103d8c1a027d3364540b9044a298346bce1ead))
* **release:** bump build number to 112 ([f0b9bd5](https://git.tuffraid.net/cowch/lstV2/commits/f0b9bd599a9da86b36dd952b8d3dc47116add380))
* **release:** bump build number to 113 ([ff1dfdd](https://git.tuffraid.net/cowch/lstV2/commits/ff1dfdde68335907e8e24f0f011cc611d5de3b0c))
* **release:** bump build number to 114 ([fb31ae7](https://git.tuffraid.net/cowch/lstV2/commits/fb31ae79d125c6b672608b56b02d9c7239250d19))
* **release:** bump build number to 115 ([ac7859f](https://git.tuffraid.net/cowch/lstV2/commits/ac7859fda34606874a05f8665fe7478cf79d7037))
* **release:** bump build number to 116 ([88258ba](https://git.tuffraid.net/cowch/lstV2/commits/88258baf9d71cba38f987851e62363563f28d6ae))
* **release:** bump build number to 116 ([82acfcc](https://git.tuffraid.net/cowch/lstV2/commits/82acfcc4a9effbdf5fe8debd4617b3cfca01c0ea))
* **release:** bump build number to 117 ([d046c4e](https://git.tuffraid.net/cowch/lstV2/commits/d046c4ea41d7b16f7fb109ae1ab728edd1dbcb97))
* **release:** bump build number to 118 ([1f7c33d](https://git.tuffraid.net/cowch/lstV2/commits/1f7c33d871ce408440739bfad16165aa0dd2f982))
* **release:** bump build number to 119 ([035eda9](https://git.tuffraid.net/cowch/lstV2/commits/035eda9aa8212443c1147d420fb30fdb09b3ff35))
* **release:** bump build number to 120 ([b903c8e](https://git.tuffraid.net/cowch/lstV2/commits/b903c8ee2d9f5c0cb63404f232f235d2f9ddeb81))
* **release:** bump build number to 121 ([af5dc92](https://git.tuffraid.net/cowch/lstV2/commits/af5dc9266f5a75c0065615f1526a2ee726b66a02))
* **release:** bump build number to 122 ([28fbf2c](https://git.tuffraid.net/cowch/lstV2/commits/28fbf2c1e43996c50eca7d3b7145bd7dec31f1de))
* **release:** bump build number to 123 ([09c0825](https://git.tuffraid.net/cowch/lstV2/commits/09c082519467c85240c8ed89123cb260c0011514))
* **release:** bump build number to 124 ([098c477](https://git.tuffraid.net/cowch/lstV2/commits/098c477119c5accb0146f7bd125f80197bb44210))
* **release:** bump build number to 125 ([a647d05](https://git.tuffraid.net/cowch/lstV2/commits/a647d05d3b8aa0ec3cd8fa1dc05fbed02476769a))
* **release:** bump build number to 93 ([b4e0f4c](https://git.tuffraid.net/cowch/lstV2/commits/b4e0f4c66687d8957fa81076687d0504582812aa))
* **release:** bump build number to 94 ([f39ae0f](https://git.tuffraid.net/cowch/lstV2/commits/f39ae0f590c17e4fc4b278296f86562c572bc6d2))
* **release:** bump build number to 95 ([278c553](https://git.tuffraid.net/cowch/lstV2/commits/278c5538bcea651f06c11f5be9f4948e328261c1))
* **release:** bump build number to 96 ([ca552d5](https://git.tuffraid.net/cowch/lstV2/commits/ca552d55878bc52e01186b09b6721af24faa445b))
* **release:** bump build number to 97 ([c03b61f](https://git.tuffraid.net/cowch/lstV2/commits/c03b61f48a32cef3e03f3da8d0f70edbcecd0e59))
* **release:** bump build number to 98 ([ea7801f](https://git.tuffraid.net/cowch/lstV2/commits/ea7801fccf7ecf176804fd10fac6db91321402d7))
* **release:** bump build number to 99 ([600a989](https://git.tuffraid.net/cowch/lstV2/commits/600a98922630f0694971e110db2f8bf997571e36))
### 🛠️ Code Refactor
* **format changes:** changes to the file formats to match across computers ([9784072](https://git.tuffraid.net/cowch/lstV2/commits/9784072aab3245d52b9a8d0d6a3a5a7716e61e0c))
* **logger:** changed log level to be in the env file vs hardcoded ([f9f68ce](https://git.tuffraid.net/cowch/lstV2/commits/f9f68ce969e6d87a23c1be21fd9877d2dfa6f6cc))
* **lst:** added huston backin ([11e5cf4](https://git.tuffraid.net/cowch/lstV2/commits/11e5cf4d865f93d65d870bff4bde96288c04db24))
* **lst:** added in a removal of old files so we can keep the server clean ([c27ad7c](https://git.tuffraid.net/cowch/lstV2/commits/c27ad7cf6a5ea2ee636c1b25a66773b07a897a3e))
* **ocme:** added new error in posting where we know when the pallet is not instock ([27d6b6e](https://git.tuffraid.net/cowch/lstV2/commits/27d6b6e88423ee201e30bbcc3747126c8567801c))
* **ocp:** finished the dashboard and move logs and labels to a tab style ([7b1c6e1](https://git.tuffraid.net/cowch/lstV2/commits/7b1c6e1361fcc93729c250db29e828d6d07ca387))
* **ocp:** lots of work for rfid and dyco contorl ([ba3d721](https://git.tuffraid.net/cowch/lstV2/commits/ba3d721940e800b61aeba6f3c81d9af40be01c9c))
* **server:** removed console logs ([a5dee58](https://git.tuffraid.net/cowch/lstV2/commits/a5dee582236d3c1ab581b63fd463439a4d9e8176))
### 🐛 Bug fixes
* **admin auth:** added in role change for v1 ([f9096b5](https://git.tuffraid.net/cowch/lstV2/commits/f9096b54f5902d19226e9e5728ffa4c64d8062f9))
* **logger:** changes to get the most recent verse aged ([2c8d1fb](https://git.tuffraid.net/cowch/lstV2/commits/2c8d1fb71045dcd241e62b4ee2f1c03ae3690e5b))
* **misc:** work on ocp to improve the errors that were missed and better logging ([63b1151](https://git.tuffraid.net/cowch/lstV2/commits/63b1151cb7e1d81b080c28dbec569c851fa1b48a))
* **ocme:** fixed to make sure we can always just update a runnning nunmber ([e3ba45a](https://git.tuffraid.net/cowch/lstV2/commits/e3ba45ae13ddc21525c6113bf0f6dca2d2965637))
## [2.10.0](https://git.tuffraid.net/cowch/lstV2/compare/v2.9.0...v2.10.0) (2025-03-27)
### 📝 Testing Code
* **ocme:** lots of changes to get it working in production ([6dd9ed8](https://git.tuffraid.net/cowch/lstV2/commits/6dd9ed848bc7d4e8a62778cfe36f89e077187157))
### 📈 Project changes
* **lst:** changes made to the settings file to work across all pvs ([5149de3](https://git.tuffraid.net/cowch/lstV2/commits/5149de3199d3aaf349b8a4c99d5db83f8d04ae49))
### 🐛 Bug fixes
* **logistics:** correction to the lane grab ([2d4b1db](https://git.tuffraid.net/cowch/lstV2/commits/2d4b1db5f4697770aee8829764bd85643893d3e8))
* **lst:** missing , in versionRc ([c35726b](https://git.tuffraid.net/cowch/lstV2/commits/c35726bf5ccd6565abda37c6618a34e975e70d41))
* **ocme:** corrections to posting data for the response was added ([dcb56d4](https://git.tuffraid.net/cowch/lstV2/commits/dcb56d427458c774b462e78daba6fee4afefd525))
* **ocme:** fixed some import errors ([b01980e](https://git.tuffraid.net/cowch/lstV2/commits/b01980e1c5a8833b25ea557a2da07b74560526e3))
* **ocme:** fixed the camera routes ([51267f5](https://git.tuffraid.net/cowch/lstV2/commits/51267f5202ceebe1c31c819395e1588d47657c38))
* **servers:** fixed the weird conflict import that happened ([8a040d1](https://git.tuffraid.net/cowch/lstV2/commits/8a040d15dbf5de5fbb9949b7834c39b93b145aa7))
### 📝 Chore
* bump build number to 50 ([9bdca33](https://git.tuffraid.net/cowch/lstV2/commits/9bdca3317c7c213f9c5853222eafe1ab028b5f64))
* **release:** bump build number to 52 ([5886bea](https://git.tuffraid.net/cowch/lstV2/commits/5886bea85da30fe43635e05de1e9bc4f5789fa64))
* **release:** bump build number to 55 ([0c5fc1d](https://git.tuffraid.net/cowch/lstV2/commits/0c5fc1dfb0a8bee5cf7414733a555fe1b5888b8e))
* **release:** bump build number to 56 ([335ea2d](https://git.tuffraid.net/cowch/lstV2/commits/335ea2deca54dacda2117849104bf4c24faee3e8))
* **release:** bump build number to 57 ([6843368](https://git.tuffraid.net/cowch/lstV2/commits/6843368c3682bb56e5ce4aafbb18367e96a6016e))
* **release:** bump build number to 58 ([0475bb3](https://git.tuffraid.net/cowch/lstV2/commits/0475bb30f9d6d4e2d132b15b24d9ab225d8de3b9))
* **release:** bump build number to 59 ([9395ec6](https://git.tuffraid.net/cowch/lstV2/commits/9395ec6cd4483f52fcca949a95a4ceecaa843f65))
* **release:** bump build number to 60 ([57e82d2](https://git.tuffraid.net/cowch/lstV2/commits/57e82d23603622c301c7e6d636f9cec07d44e0b2))
* **release:** bump build number to 61 ([5ed67f3](https://git.tuffraid.net/cowch/lstV2/commits/5ed67f3fc0f99ca5344da8b73fd005184b89670b))
* **release:** bump build number to 62 ([c1cc355](https://git.tuffraid.net/cowch/lstV2/commits/c1cc355f4f4e74c3897cada64d961b28d24c07b2))
* **release:** bump build number to 63 ([e1cad02](https://git.tuffraid.net/cowch/lstV2/commits/e1cad027d2714ddf3289e31b5c3bbb96306f1f56))
* **release:** bump build number to 64 ([63177b5](https://git.tuffraid.net/cowch/lstV2/commits/63177b523e2dd1fabefe52f52dd3c6b3fcff9bcf))
* **release:** bump build number to 65 ([3a4dc47](https://git.tuffraid.net/cowch/lstV2/commits/3a4dc47a368bb20f622b7e4647337c5e68150db9))
* **release:** bump build number to 66 ([b3b5fce](https://git.tuffraid.net/cowch/lstV2/commits/b3b5fcec651e2bc585ecd4be03be4288867b214f))
* **release:** bump build number to 67 ([cb01ef1](https://git.tuffraid.net/cowch/lstV2/commits/cb01ef1af17e9c83753e09a1528e6140a4104273))
* **release:** bump build number to 68 ([4664768](https://git.tuffraid.net/cowch/lstV2/commits/46647687dc9938ecf6e72a63f15afc0e29bebcc4))
* **release:** bump build number to 69 ([2c227f9](https://git.tuffraid.net/cowch/lstV2/commits/2c227f94287a6ce9c06b0a41772085ba1f4f0cd3))
* **release:** bump build number to 70 ([96abef7](https://git.tuffraid.net/cowch/lstV2/commits/96abef762b77361c857642a33acfb69c0bc00666))
* **release:** bump build number to 71 ([836f3e2](https://git.tuffraid.net/cowch/lstV2/commits/836f3e294b4d92673388023503e409592ef95ba3))
* **release:** bump build number to 72 ([abe53b8](https://git.tuffraid.net/cowch/lstV2/commits/abe53b8f5d9bfbf517c7f56c5d4df2e4586aedbd))
* **release:** bump build number to 73 ([5201012](https://git.tuffraid.net/cowch/lstV2/commits/5201012235181975cb89aee8dbc644ca4aa42210))
* **release:** bump build number to 74 ([529e922](https://git.tuffraid.net/cowch/lstV2/commits/529e922485303251349c081ad8b2e9bee08dd420))
* **release:** bump build number to 75 ([b8c028a](https://git.tuffraid.net/cowch/lstV2/commits/b8c028a6c1fa54afeb049fd42e666bc40690aa4e))
* **release:** bump build number to 76 ([9254e52](https://git.tuffraid.net/cowch/lstV2/commits/9254e528086b95ada8c9dc4468f4fbb5b39fbd68))
* **release:** bump build number to 77 ([4d4d6fc](https://git.tuffraid.net/cowch/lstV2/commits/4d4d6fc7a4885b4b1652ac125a2b39b8325b0d6e))
* **release:** bump build number to 78 ([36cdc7b](https://git.tuffraid.net/cowch/lstV2/commits/36cdc7b4bf3bf90a785bab4d9892e65f84cb162a))
* **release:** bump build number to 79 ([8d2721b](https://git.tuffraid.net/cowch/lstV2/commits/8d2721b0c2f6255affcd9ec08427e1b4e6771107))
* **release:** bump build number to 80 ([cacfd6d](https://git.tuffraid.net/cowch/lstV2/commits/cacfd6d2e0e11ab7dbc5cb443a58df7bf8d2b8bb))
* **release:** bump build number to 81 ([2509888](https://git.tuffraid.net/cowch/lstV2/commits/250988800e1429a1f46005ae54a2a07d31fac3a8))
* **release:** bump build number to 82 ([c2aa69a](https://git.tuffraid.net/cowch/lstV2/commits/c2aa69ab0a2f925944abd1b78af6a8698249bff8))
* **release:** bump build number to 83 ([7f946c0](https://git.tuffraid.net/cowch/lstV2/commits/7f946c095b8c0208c97bd1bb2c33cf466a04d125))
* **release:** bump build number to 84 ([4162543](https://git.tuffraid.net/cowch/lstV2/commits/416254353cd0a926aaf14c343db2ad18f025b230))
* **release:** bump build number to 85 ([5d61eb8](https://git.tuffraid.net/cowch/lstV2/commits/5d61eb879e102939df56928cd8d57eda561aabca))
* **release:** bump build number to 86 ([5ab813f](https://git.tuffraid.net/cowch/lstV2/commits/5ab813f37894136549de0a05e5b4e2491220d16d))
* **release:** bump build number to 87 ([509ef84](https://git.tuffraid.net/cowch/lstV2/commits/509ef8472688ba655c524a1068c3721559f5da11))
* **release:** bump build number to 88 ([c0a0589](https://git.tuffraid.net/cowch/lstV2/commits/c0a0589b3c860ae8202a5dd230c18a3463cce857))
* **release:** bump build number to 89 ([af076b8](https://git.tuffraid.net/cowch/lstV2/commits/af076b8e27b599c479e4e51f38487cf4cb3cfa34))
* **release:** bump build number to 90 ([04aa943](https://git.tuffraid.net/cowch/lstV2/commits/04aa9439205f12bfe10b3fdf76a211a7b8178ac1))
* **release:** bump build number to 91 ([a31e7ea](https://git.tuffraid.net/cowch/lstV2/commits/a31e7ea1634fd6b10e22dff4ba93157c2be711ac))
* **release:** bump build number to 92 ([878e650](https://git.tuffraid.net/cowch/lstV2/commits/878e650e6237345d825632dff0387a89c7eee088))
### 🌟 Enhancements
* **logistics:** added in return material by lane name and gets lane id ([58f7b43](https://git.tuffraid.net/cowch/lstV2/commits/58f7b4322d3e523620f827a580ff5534b0be5f6c))
* **notify:** intial nofity system added to monitor crashes and rfid wrapper ([eb051d5](https://git.tuffraid.net/cowch/lstV2/commits/eb051d51f21b1ad617851fa3f4a1b8ba2f4fe4ac))
* **ocme:** manual camera trigger for the wrapper added ([bc18211](https://git.tuffraid.net/cowch/lstV2/commits/bc1821132e30be6b3a36bae63ce52fd4007f74dd))
* **ocp:** add lots with scroll view was added ([9e9a56c](https://git.tuffraid.net/cowch/lstV2/commits/9e9a56cbb15782770daf7e4ab08b31ad23df6c27))
* **ocp:** added in printers get and add ([f90066c](https://git.tuffraid.net/cowch/lstV2/commits/f90066c09020ebac03a93059c8e41f8531812c8a))
* **ocp:** added labeling logs in ([ceaa25f](https://git.tuffraid.net/cowch/lstV2/commits/ceaa25f31e6da526abd0350881e21984c66b455a))
* **ocp:** added wrappercard into the mix ([3b5e82f](https://git.tuffraid.net/cowch/lstV2/commits/3b5e82fdc122824b4f59f00f2ed59b90813694ba))
* **ocp:** create and book in plus dyco controller implemented ([7a1a477](https://git.tuffraid.net/cowch/lstV2/commits/7a1a4773e71cab93f36071530dbb5561e7592ec7))
* **ocp:** prodlink check added ([f9cd3fb](https://git.tuffraid.net/cowch/lstV2/commits/f9cd3fb8815635fdd0736b573dec86d14b24a6a7))
* **server:** clearn up code ([fe0c500](https://git.tuffraid.net/cowch/lstV2/commits/fe0c500dcfe317b3f67d67474fda7cf6872f3f37))
### 🛠️ Code Refactor
* **frontend:** prettier change to formatting ([c71b514](https://git.tuffraid.net/cowch/lstV2/commits/c71b514d9add69c63e608b22bd8a936fa770b167))
* **ocme:** clean up on the getInfo endpoint ([b9f1909](https://git.tuffraid.net/cowch/lstV2/commits/b9f19095cbd86569b58bec99575d924db997e385))
* **ocme:** cleaup on pickedup ([73aa95a](https://git.tuffraid.net/cowch/lstV2/commits/73aa95a6937129a36f6ece10ef8d6fd5f01a2b27))
* **ocme:** corrections to endpoints to work with ocnme as intneeded ([e6e1cec](https://git.tuffraid.net/cowch/lstV2/commits/e6e1cecce33b3c8cd94cf6372601c92f268b12a5))
* **ocme:** removed some info logs as ocme calls alot ([96e7f74](https://git.tuffraid.net/cowch/lstV2/commits/96e7f742fe68cc98de3039bd3dbfb2d27f6d7204))
* **ocp:** moved printers to there own folder to keep everything clean ([030f9f9](https://git.tuffraid.net/cowch/lstV2/commits/030f9f9aacdfcca1298a26be4442f5629626ba79))
* **rfid:** refactored station 3 (lines) and complete logic ([bd11feb](https://git.tuffraid.net/cowch/lstV2/commits/bd11feb1365ffb058283eb9384684c199ef9fd21))
* **rfid:** refactored the way the wrapper works to indlude backup dyco plan ([90e9bb0](https://git.tuffraid.net/cowch/lstV2/commits/90e9bb0ff6a2f598b055fae931a0d3c78f93e868))
* **server:** changed to log only when in dev, and removed the redirect of the url ([adf0880](https://git.tuffraid.net/cowch/lstV2/commits/adf08806593fdcd3a3d9d0a6d07f0262501e21ad))
* **tcpserver:** just the tcp server doing something ([04eb2e3](https://git.tuffraid.net/cowch/lstV2/commits/04eb2e3e145ba99b330ab627fcd9bae436e17fcf))
* **updateserver:** removed ocme from starting back up as it was migrated ([2551d6c](https://git.tuffraid.net/cowch/lstV2/commits/2551d6c9074a0338224d81e690600a7a4b9c9777))
## [2.9.0](https://git.tuffraid.net/cowch/lstV2/compare/v2.8.0...v2.9.0) (2025-03-23) ## [2.9.0](https://git.tuffraid.net/cowch/lstV2/compare/v2.8.0...v2.9.0) (2025-03-23)

View File

@@ -0,0 +1,17 @@
-- Create the "printers" registry table: one row per label printer known to
-- the system, keyed by a generated UUID.
CREATE TABLE "printers" (
"printer_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"humanReadableId" text,
"name" text NOT NULL,
"ipAddress" text,
"port" numeric NOT NULL,
"status" text,
"statusText" text NOT NULL,
"lastTimePrinted" text,
"assigned" boolean DEFAULT false NOT NULL,
"remark" text,
"monitorState" boolean DEFAULT false NOT NULL,
"add_Date" timestamp DEFAULT now(),
"upd_date" timestamp DEFAULT now()
);
--> statement-breakpoint
-- NOTE(review): this index is named "humanReadableId" but is built over the
-- "name" column; a later migration in this changeset drops and recreates it
-- over "humanReadableId" itself.
CREATE UNIQUE INDEX "humanReadableId" ON "printers" USING btree ("name");

View File

@@ -0,0 +1,3 @@
-- Relax the initial constraints: allow these status/bookkeeping columns to be
-- NULL so printer rows can be inserted before the values are populated.
-- NOTE(review): rationale inferred from column names — confirm against app code.
ALTER TABLE "printers" ALTER COLUMN "statusText" DROP NOT NULL;--> statement-breakpoint
ALTER TABLE "printers" ALTER COLUMN "assigned" DROP NOT NULL;--> statement-breakpoint
ALTER TABLE "printers" ALTER COLUMN "monitorState" DROP NOT NULL;

View File

@@ -0,0 +1 @@
-- Allow "port" to be NULL as well (it was NOT NULL in the initial CREATE TABLE).
ALTER TABLE "printers" ALTER COLUMN "port" DROP NOT NULL;

View File

@@ -0,0 +1,2 @@
-- Rebuild the unique index so it actually covers the "humanReadableId" column;
-- the original migration created the identically-named index over "name".
DROP INDEX "humanReadableId";--> statement-breakpoint
CREATE UNIQUE INDEX "humanReadableId" ON "printers" USING btree ("humanReadableId");

View File

@@ -0,0 +1,4 @@
-- Create "prodlabels": production label records keyed by UUID, each carrying
-- a required running number (uniquely indexed in a follow-up migration).
CREATE TABLE "prodlabels" (
"label_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"runningNr" integer NOT NULL
);

View File

@@ -0,0 +1,7 @@
-- Extend "prodlabels" with printer/line/status metadata and timestamps, then
-- enforce uniqueness of the running number.
ALTER TABLE "prodlabels" ADD COLUMN "printerID" integer;--> statement-breakpoint
ALTER TABLE "prodlabels" ADD COLUMN "printerName" text;--> statement-breakpoint
ALTER TABLE "prodlabels" ADD COLUMN "line" integer;--> statement-breakpoint
ALTER TABLE "prodlabels" ADD COLUMN "status" text;--> statement-breakpoint
ALTER TABLE "prodlabels" ADD COLUMN "add_date" timestamp;--> statement-breakpoint
ALTER TABLE "prodlabels" ADD COLUMN "upd_date" timestamp;--> statement-breakpoint
CREATE UNIQUE INDEX "runningNr" ON "prodlabels" USING btree ("runningNr");

View File

@@ -0,0 +1,4 @@
-- Give the prodlabels timestamps now() defaults, record the inserting user,
-- and add a unique running-number index to the separate "ocmeData" table.
-- NOTE(review): the last statement intentionally targets "ocmeData", not
-- "prodlabels" — it mirrors the uniqueIndex added to the ocmeData app schema.
ALTER TABLE "prodlabels" ALTER COLUMN "add_date" SET DEFAULT now();--> statement-breakpoint
ALTER TABLE "prodlabels" ALTER COLUMN "upd_date" SET DEFAULT now();--> statement-breakpoint
ALTER TABLE "prodlabels" ADD COLUMN "add_user" text DEFAULT 'lst';--> statement-breakpoint
CREATE UNIQUE INDEX "ocme_runningNr" ON "ocmeData" USING btree ("runningNr");

View File

@@ -0,0 +1,13 @@
-- Create "notifications": scheduled notification definitions with an
-- interval/time-unit pair, a recipient list, and a JSONB settings blob.
CREATE TABLE "notifications" (
"notify_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"name" text NOT NULL,
"description" text NOT NULL,
"checkInterval" text DEFAULT '1',
"timeType" text DEFAULT 'hour',
"emails" text,
"active" boolean DEFAULT false,
"lastRan" timestamp DEFAULT now(),
"notifiySettings" jsonb DEFAULT '{}'::jsonb
);
--> statement-breakpoint
-- Notification names must be unique.
-- NOTE(review): "notifiySettings" above is misspelled, but the app-side drizzle
-- schema uses the same spelling, so renaming would require a coordinated migration.
CREATE UNIQUE INDEX "notify_name" ON "notifications" USING btree ("name");

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -183,6 +183,62 @@
"when": 1742655504936, "when": 1742655504936,
"tag": "0025_amusing_sugar_man", "tag": "0025_amusing_sugar_man",
"breakpoints": true "breakpoints": true
},
{
"idx": 26,
"version": "7",
"when": 1742914066219,
"tag": "0026_daily_the_twelve",
"breakpoints": true
},
{
"idx": 27,
"version": "7",
"when": 1742917145140,
"tag": "0027_needy_sleepwalker",
"breakpoints": true
},
{
"idx": 28,
"version": "7",
"when": 1742917676211,
"tag": "0028_fast_wong",
"breakpoints": true
},
{
"idx": 29,
"version": "7",
"when": 1742917978318,
"tag": "0029_giant_blue_blade",
"breakpoints": true
},
{
"idx": 30,
"version": "7",
"when": 1742938986653,
"tag": "0030_conscious_cable",
"breakpoints": true
},
{
"idx": 31,
"version": "7",
"when": 1742939306614,
"tag": "0031_loud_alex_power",
"breakpoints": true
},
{
"idx": 32,
"version": "7",
"when": 1743124980863,
"tag": "0032_tough_iron_monger",
"breakpoints": true
},
{
"idx": 33,
"version": "7",
"when": 1743424730855,
"tag": "0033_flimsy_salo",
"breakpoints": true
} }
] ]
} }

View File

@@ -0,0 +1,36 @@
import {
boolean,
jsonb,
pgTable,
text,
timestamp,
uniqueIndex,
uuid,
} from "drizzle-orm/pg-core";
import { createSelectSchema } from "drizzle-zod";
// Drizzle table definition for "notifications": scheduled notification jobs
// with an interval/time-unit pair, recipients, and a JSONB settings blob.
// Mirrors the "notifications" CREATE TABLE migration in this changeset.
export const notifications = pgTable(
  "notifications",
  {
    // Surrogate primary key generated by Postgres (gen_random_uuid()).
    notify_id: uuid("notify_id").defaultRandom().primaryKey(),
    name: text("name").notNull(),
    description: text("description").notNull(),
    // Stored as text despite being numeric-looking — presumably parsed by the
    // scheduler together with timeType ("1" + "hour"). TODO confirm.
    checkInterval: text("checkInterval").default("1"),
    timeType: text("timeType").default("hour"),
    // Recipient list; format (comma-separated?) not visible here — confirm.
    emails: text("emails"),
    active: boolean("active").default(false),
    lastRan: timestamp("lastRan").defaultNow(),
    // NOTE(review): "notifiySettings" is misspelled but matches the actual DB
    // column created by the migration; renaming requires a migration.
    notifiySettings: jsonb("notifiySettings").default({}),
  },
  (table) => [
    // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
    // Notification names are unique.
    uniqueIndex("notify_name").on(table.name),
  ]
);

// Schema for inserting a user - can be used to validate API requests
// export const insertRolesSchema = createInsertSchema(roles, {
//   name: z.string().min(3, {message: "Role name must be more than 3 letters"}),
// });

// Schema for selecting a Expenses - can be used to validate API responses
// Zod schema derived from the table shape; validates rows returned to the API.
export const selectNotificationsSchema = createSelectSchema(notifications);

View File

@@ -1,6 +1,15 @@
import {text, pgTable, numeric, index, timestamp, boolean, uuid, uniqueIndex} from "drizzle-orm/pg-core"; import {
import {createInsertSchema, createSelectSchema} from "drizzle-zod"; text,
import {z} from "zod"; pgTable,
numeric,
index,
timestamp,
boolean,
uuid,
uniqueIndex,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import { z } from "zod";
export const ocmeData = pgTable( export const ocmeData = pgTable(
"ocmeData", "ocmeData",
@@ -20,6 +29,7 @@ export const ocmeData = pgTable(
(table) => [ (table) => [
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`), // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
// uniqueIndex("role_name").on(table.name), // uniqueIndex("role_name").on(table.name),
uniqueIndex("ocme_runningNr").on(table.runningNr),
] ]
); );

View File

@@ -0,0 +1,42 @@
import {
text,
pgTable,
numeric,
index,
timestamp,
boolean,
uuid,
uniqueIndex,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import { z } from "zod";
/**
 * Drizzle table definition for the "printers" registry: one row per label
 * printer known to the system. Mirrors the "printers" migrations in this
 * changeset (nullable status columns, unique index on humanReadableId).
 */
export const printers = pgTable(
  "printers",
  {
    // Surrogate primary key generated by Postgres (gen_random_uuid()).
    printer_id: uuid("printer_id").defaultRandom().primaryKey(),
    // Human-friendly identifier; uniquely indexed below.
    humanReadableId: text("humanReadableId"),
    name: text("name").notNull(),
    ipAddress: text("ipAddress"),
    // NOTE(review): Postgres `numeric` surfaces as a string in drizzle's JS
    // types; an integer column may suit a TCP port better if a migration is
    // acceptable. Left as-is to match the existing DB schema.
    port: numeric("port"),
    status: text("status"),
    statusText: text("statusText"),
    lastTimePrinted: text("lastTimePrinted"),
    assigned: boolean("assigned").default(false),
    remark: text("remark"),
    monitorState: boolean("monitorState").default(false),
    add_Date: timestamp("add_Date").defaultNow(),
    upd_date: timestamp("upd_date").defaultNow(),
  },
  (table) => [
    //uniqueIndex("emailUniqueIndex").on(sql`lower(${table.email})`),
    uniqueIndex("humanReadableId").on(table.humanReadableId),
  ]
);

// Zod schema derived from the table; validates printer rows returned to the
// API. The original export name was copy-pasted from the roles schema; it is
// kept so existing imports keep working.
export const selectRolesSchema = createSelectSchema(printers);
// Correctly-named alias — prefer this in new code.
export const selectPrintersSchema = selectRolesSchema;

View File

@@ -0,0 +1,30 @@
import {
integer,
pgTable,
uuid,
uniqueIndex,
text,
timestamp,
} from "drizzle-orm/pg-core";
import { createSelectSchema } from "drizzle-zod";
// Drizzle table definition for "prodlabels": one row per production label,
// identified by a unique running number plus printer/line/status metadata.
// Mirrors the "prodlabels" migrations in this changeset.
export const prodlabels = pgTable(
  "prodlabels",
  {
    // Surrogate primary key generated by Postgres (gen_random_uuid()).
    label_id: uuid("label_id").defaultRandom().primaryKey(),
    // NOTE(review): integer id while the printers table keys by uuid — how the
    // two relate is not visible here; confirm against the callers.
    printerID: integer("printerID"),
    printerName: text("printerName"),
    line: integer("line"),
    // Required label running number; uniquely indexed below.
    runningNr: integer("runningNr").notNull(),
    status: text("status"),
    // Defaults to the service account used by the label server.
    add_user: text("add_user").default("lst"),
    add_date: timestamp("add_date").defaultNow(),
    upd_date: timestamp("upd_date").defaultNow(),
  },
  (table) => [
    //uniqueIndex("emailUniqueIndex").on(sql`lower(${table.email})`),
    uniqueIndex("runningNr").on(table.runningNr),
  ]
);

// Zod schema derived from the table; validates label rows returned to the API.
export const prodlabelsSchema = createSelectSchema(prodlabels);

View File

@@ -1,7 +1,14 @@
import {text, pgTable, timestamp, uuid, uniqueIndex, jsonb} from "drizzle-orm/pg-core"; import {
import {createSelectSchema} from "drizzle-zod"; text,
import {z} from "zod"; pgTable,
import {modules} from "./modules.js"; timestamp,
uuid,
uniqueIndex,
jsonb,
} from "drizzle-orm/pg-core";
import { createSelectSchema } from "drizzle-zod";
import { z } from "zod";
import { modules } from "./modules.js";
export const settings = pgTable( export const settings = pgTable(
"settings", "settings",

File diff suppressed because it is too large Load Diff

View File

@@ -8,11 +8,12 @@
"build": "rimraf dist && tsc -b && vite build", "build": "rimraf dist && tsc -b && vite build",
"lint": "eslint .", "lint": "eslint .",
"preview": "vite preview", "preview": "vite preview",
"shad": "npx shadcn@canary add ", "shad": "npx shadcn@latest add ",
"checkupdates": "npm-check-updates" "checkupdates": "npm-check-updates"
}, },
"dependencies": { "dependencies": {
"@hookform/resolvers": "^4.1.3", "@hookform/resolvers": "^4.1.3",
"@radix-ui/react-accordion": "^1.2.3",
"@radix-ui/react-avatar": "^1.1.3", "@radix-ui/react-avatar": "^1.1.3",
"@radix-ui/react-checkbox": "^1.1.4", "@radix-ui/react-checkbox": "^1.1.4",
"@radix-ui/react-collapsible": "^1.1.3", "@radix-ui/react-collapsible": "^1.1.3",
@@ -20,12 +21,14 @@
"@radix-ui/react-dropdown-menu": "^2.1.6", "@radix-ui/react-dropdown-menu": "^2.1.6",
"@radix-ui/react-label": "^2.1.2", "@radix-ui/react-label": "^2.1.2",
"@radix-ui/react-popover": "^1.1.6", "@radix-ui/react-popover": "^1.1.6",
"@radix-ui/react-scroll-area": "^1.2.3",
"@radix-ui/react-select": "^2.1.6", "@radix-ui/react-select": "^2.1.6",
"@radix-ui/react-separator": "^1.1.2", "@radix-ui/react-separator": "^1.1.2",
"@radix-ui/react-slot": "^1.1.2", "@radix-ui/react-slot": "^1.1.2",
"@radix-ui/react-tabs": "^1.1.3", "@radix-ui/react-tabs": "^1.1.3",
"@radix-ui/react-tooltip": "^1.1.8", "@radix-ui/react-tooltip": "^1.1.8",
"@tailwindcss/vite": "^4.0.15", "@tailwindcss/vite": "^4.0.15",
"@tanstack/react-form": "^1.2.1",
"@tanstack/react-query": "^5.69.0", "@tanstack/react-query": "^5.69.0",
"@tanstack/react-router": "^1.114.27", "@tanstack/react-router": "^1.114.27",
"@tanstack/react-table": "^8.21.2", "@tanstack/react-table": "^8.21.2",
@@ -45,6 +48,7 @@
"react-dom": "^19.0.0", "react-dom": "^19.0.0",
"react-grid-layout": "^1.5.1", "react-grid-layout": "^1.5.1",
"react-hook-form": "^7.54.2", "react-hook-form": "^7.54.2",
"react-resizable-panels": "^2.1.7",
"sonner": "^2.0.1", "sonner": "^2.0.1",
"tailwind-merge": "^3.0.2", "tailwind-merge": "^3.0.2",
"tailwindcss": "^4.0.15", "tailwindcss": "^4.0.15",

View File

@@ -35,11 +35,14 @@ export default function ServerPage() {
const { modules } = useModuleStore(); const { modules } = useModuleStore();
const router = useRouter(); const router = useRouter();
const { data, isError, error, isLoading } = useQuery(getServers(token ?? "")); const { data, isError, error, isLoading } = useQuery(
getServers(token ?? "")
);
const adminModule = modules.filter((n) => n.name === "admin"); const adminModule = modules.filter((n) => n.name === "admin");
const userLevel = const userLevel =
user?.roles?.filter((r) => r.module_id === adminModule[0].module_id) || []; user?.roles?.filter((r) => r.module_id === adminModule[0].module_id) ||
[];
if (!adminModule[0]?.roles?.includes(userLevel[0]?.role)) { if (!adminModule[0]?.roles?.includes(userLevel[0]?.role)) {
router.navigate({ to: "/" }); router.navigate({ to: "/" });
@@ -49,7 +52,7 @@ export default function ServerPage() {
return <div>{JSON.stringify(error)}</div>; return <div>{JSON.stringify(error)}</div>;
} }
console.log(data); //console.log(data);
return ( return (
<LstCard className="m-2 flex place-content-center w-dvh"> <LstCard className="m-2 flex place-content-center w-dvh">
<Table> <Table>
@@ -89,11 +92,19 @@ export default function ServerPage() {
) : ( ) : (
<TableBody> <TableBody>
{data?.map((server: Servers) => { {data?.map((server: Servers) => {
const strippedDate = server.lastUpdated.replace("Z", ""); // Remove Z const strippedDate = server.lastUpdated.replace(
const formattedDate = format(strippedDate, "MM/dd/yyyy hh:mm a"); "Z",
""
); // Remove Z
const formattedDate = format(
strippedDate,
"MM/dd/yyyy hh:mm a"
);
return ( return (
<TableRow key={server.server_id}> <TableRow key={server.server_id}>
<TableCell className="font-medium">{server.sName}</TableCell> <TableCell className="font-medium">
{server.sName}
</TableCell>
<TableCell className="font-medium"> <TableCell className="font-medium">
{server.serverDNS} {server.serverDNS}
</TableCell> </TableCell>
@@ -103,13 +114,20 @@ export default function ServerPage() {
<TableCell className="font-medium"> <TableCell className="font-medium">
{server.idAddress} {server.idAddress}
</TableCell> </TableCell>
<TableCell className="font-medium">{formattedDate}</TableCell> <TableCell className="font-medium">
{formattedDate}
</TableCell>
<TableCell className="font-medium"> <TableCell className="font-medium">
{adminUrlCheck() && ( {adminUrlCheck() && (
<div className="flex flex-row"> <div className="flex flex-row">
<UpdateServer server={server} token={token as string} /> <UpdateServer
server={server}
token={token as string}
/>
<StartServer /> <StartServer />
<StopServer /> <StopServer
plantData={server}
/>
<RestartServer /> <RestartServer />
</div> </div>
)} )}

View File

@@ -1,14 +1,46 @@
import {Button} from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import {Tooltip, TooltipContent, TooltipProvider, TooltipTrigger} from "@/components/ui/tooltip"; import {
import {Octagon} from "lucide-react"; Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
import axios from "axios";
import { Octagon } from "lucide-react";
export default function StopServer() { export default function StopServer(plantData: any) {
const token = localStorage.getItem("auth_token");
const handleStopServer = async (plant: string) => {
let data: any = {
processType: "stop",
plantToken: plant,
};
const url: string = window.location.host.split(":")[0];
if (url === "localhost") {
data = { ...data, remote: "true" };
}
try {
const res = await axios.post("/api/server/serviceprocess", data, {
headers: { Authorization: `Bearer ${token}` },
});
console.log(res);
} catch (error) {
console.log(error);
}
};
return ( return (
<div> <div>
<TooltipProvider> <TooltipProvider>
<Tooltip> <Tooltip>
<TooltipTrigger asChild> <TooltipTrigger asChild>
<Button variant={"outline"} size={"icon"}> <Button
variant="destructive"
size={"icon"}
onClick={() =>
handleStopServer(plantData.plantToken)
}
>
<Octagon /> <Octagon />
</Button> </Button>
</TooltipTrigger> </TooltipTrigger>

View File

@@ -1,23 +1,34 @@
import {Button} from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import {CircleFadingArrowUp} from "lucide-react"; import { CircleFadingArrowUp } from "lucide-react";
import {toast} from "sonner"; import { toast } from "sonner";
import {Servers} from "./ServerPage"; import { Servers } from "./ServerPage";
import {useQuery} from "@tanstack/react-query"; import { useQuery } from "@tanstack/react-query";
import {getSettings} from "@/utils/querys/settings"; import { getSettings } from "@/utils/querys/settings";
import axios from "axios"; import axios from "axios";
import {Tooltip, TooltipContent, TooltipProvider, TooltipTrigger} from "@/components/ui/tooltip"; import {
Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
export default function UpdateServer({server, token}: {server: Servers; token: string}) { export default function UpdateServer({
const {data} = useQuery(getSettings(token ?? "")); server,
token,
}: {
server: Servers;
token: string;
}) {
const { data } = useQuery(getSettings(token ?? ""));
const upgrade = async () => { const upgrade = async () => {
let devDir = data.filter((n: any) => n.name === "devDir"); let devDir = data.filter((n: any) => n.name === "devDir");
toast.success("Server being upgraded in the background please wait."); toast.success("Server being upgraded in the background please wait.");
try { try {
const result = await axios.post( const result = await axios.post(
`/api/server/update/${server.plantToken}`, `/api/server/update/${server.plantToken}`,
{devDir: devDir[0].value}, { devDir: devDir[0].value },
{ {
headers: {Authorization: `Bearer ${token}`}, headers: { Authorization: `Bearer ${token}` },
} }
); );
@@ -29,7 +40,9 @@ export default function UpdateServer({server, token}: {server: Servers; token: s
toast.success(result.data.message); toast.success(result.data.message);
} }
} catch (error: any) { } catch (error: any) {
toast.error(`There was an error updating the server: ${error.data.message}`); toast.error(
`There was an error updating the server: ${error.data.message}`
);
} }
}; };
return ( return (
@@ -37,7 +50,12 @@ export default function UpdateServer({server, token}: {server: Servers; token: s
<TooltipProvider> <TooltipProvider>
<Tooltip> <Tooltip>
<TooltipTrigger asChild> <TooltipTrigger asChild>
<Button variant={"outline"} size={"icon"} onClick={upgrade} disabled={server.isUpgrading}> <Button
variant={`${server.isUpgrading ? "ghost" : "outline"}`}
size={"icon"}
onClick={upgrade}
disabled={server.isUpgrading}
>
<CircleFadingArrowUp /> <CircleFadingArrowUp />
</Button> </Button>
</TooltipTrigger> </TooltipTrigger>

View File

@@ -0,0 +1,43 @@
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import { getUsers } from "@/utils/querys/admin/users";
import { useQuery } from "@tanstack/react-query";
import UserCard from "./components/UserCard";
export default function UserPage() {
const { data, isError, error, isLoading } = useQuery(getUsers());
if (isLoading) return <div className="m-auto">Loading users...</div>;
if (isError)
return (
<div className="m-auto">
There was an error getting the users.... {JSON.stringify(error)}
</div>
);
return (
<div className="m-2 w-dvw">
<Accordion type="single" collapsible>
{data.map((u: any) => {
return (
<AccordionItem key={u.user_id} value={u.user_id}>
<AccordionTrigger>
<span>{u.username}</span>
</AccordionTrigger>
<AccordionContent>
<div>
<UserCard user={u} />
</div>
</AccordionContent>
</AccordionItem>
);
})}
</Accordion>
</div>
);
}

View File

@@ -0,0 +1,238 @@
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Select,
SelectContent,
SelectGroup,
SelectItem,
SelectLabel,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { DebugButton } from "@/utils/formStuff/debugButton";
import { userFormOptions } from "@/utils/formStuff/options/userformOptions";
import { generatePassword } from "@/utils/passwordGen";
import { getUsers } from "@/utils/querys/admin/users";
import { useForm } from "@tanstack/react-form";
import { useQuery } from "@tanstack/react-query";
import axios from "axios";
import { toast } from "sonner";
/**
 * Editable card for a single user: username, email, role, and an optional
 * password change. Submits a PATCH to /api/auth/updateuser and refreshes the
 * user list on success.
 *
 * @param data - expects `data.user` with at least `user_id` plus the fields
 *   consumed by `userFormOptions` (username/email/role/password defaults).
 */
export default function UserCard(data: any) {
  const token = localStorage.getItem("auth_token");
  const { refetch } = useQuery(getUsers());

  const form = useForm({
    ...userFormOptions(data.user),
    onSubmit: async ({ value }) => {
      // Send the edited fields plus the id of the user being edited.
      const userData = { ...value, user_id: data.user.user_id };
      try {
        const res = await axios.patch(
          "/api/auth/updateuser",
          userData,
          {
            headers: {
              Authorization: `Bearer ${token}`,
            },
          }
        );
        if (res.data.success) {
          toast.success(res.data.message);
          refetch();
          form.reset();
        } else {
          // Fix: this branch was a bare `res.data.message;` expression that
          // silently dropped the server-side failure message.
          toast.error(res.data.message);
        }
      } catch (error) {
        console.log(error);
      }
    },
  });

  return (
    <div>
      <form
        onSubmit={(e) => {
          e.preventDefault();
          e.stopPropagation();
          // Fix: also submit on Enter; previously the handler only prevented
          // the browser default and the form could not be submitted this way.
          form.handleSubmit();
        }}
      >
        <form.Field
          name="username"
          validators={{
            // We can choose between form-wide and field-specific validators
            onChange: ({ value }) =>
              value.length > 3
                ? undefined
                : "Username must be longer than 3 letters",
          }}
          children={(field) => {
            return (
              <div className="m-2 min-w-48 max-w-96 p-2">
                <Label htmlFor="username">Username</Label>
                <Input
                  name={field.name}
                  value={field.state.value}
                  onBlur={field.handleBlur}
                  onChange={(e) => field.handleChange(e.target.value)}
                />
                {field.state.meta.errors.length ? (
                  <em>{field.state.meta.errors.join(",")}</em>
                ) : null}
              </div>
            );
          }}
        />
        <form.Field
          name="email"
          validators={{
            // NOTE(review): length > 3 is a very loose email check; consider a
            // proper pattern if stricter validation is wanted.
            onChange: ({ value }) =>
              value.length > 3
                ? undefined
                : "You must enter a correct email", // was truncated mid-sentence
          }}
          children={(field) => {
            return (
              <div className="m-2 min-w-48 max-w-96 p-2">
                <Label htmlFor="email">Email</Label>
                <Input
                  name={field.name}
                  value={field.state.value}
                  onBlur={field.handleBlur}
                  onChange={(e) => field.handleChange(e.target.value)}
                />
                {field.state.meta.errors.length ? (
                  <em>{field.state.meta.errors.join(",")}</em>
                ) : null}
              </div>
            );
          }}
        />
        <form.Field
          name="role"
          children={(field) => {
            return (
              <div className="m-2 min-w-48 max-w-96 p-2">
                <Label htmlFor={field.name}>Select role</Label>
                <Select
                  value={field.state.value}
                  onValueChange={field.handleChange}
                >
                  <SelectTrigger className="w-[180px]">
                    <SelectValue id={field.name} placeholder="Select Role" />
                  </SelectTrigger>
                  <SelectContent>
                    <SelectGroup>
                      <SelectLabel>Roles</SelectLabel>
                      <SelectItem value="viewer">Viewer</SelectItem>
                      <SelectItem value="operator">Operator</SelectItem>
                      <SelectItem value="manager">Manager</SelectItem>
                      <SelectItem value="admin">Admin</SelectItem>
                    </SelectGroup>
                  </SelectContent>
                </Select>
              </div>
            );
          }}
        />
        <form.Field
          name="password"
          validators={{
            onChangeAsyncDebounceMs: 500,
            onChangeAsync: ({ value }) => {
              // On the user-admin page an empty password means "leave unchanged".
              if (
                window.location.pathname.includes("/users") &&
                value.length === 0
              ) {
                return;
              }
              if (value.length < 4) {
                return "Password must be at least 4 characters long.";
              }
              if (!/[A-Z]/.test(value)) {
                return "Password must contain at least one uppercase letter.";
              }
              if (!/[a-z]/.test(value)) {
                return "Password must contain at least one lower case letter.";
              }
              if (!/[0-9]/.test(value)) {
                return "Password must contain at least one number.";
              }
              if (!/[!@#$%^&*()_+\-=\[\]{};':"\\|,.<>\/?]/.test(value)) {
                return "Password must contain at least one special character.";
              }
            },
          }}
          children={(field) => {
            return (
              <div className="m-2 p-2">
                <Label htmlFor="password">Change Password</Label>
                <div className="mt-2 flex flex-row">
                  <Input
                    className="min-w-48 max-w-96"
                    name={field.name}
                    value={field.state.value}
                    onBlur={field.handleBlur}
                    onChange={(e) => field.handleChange(e.target.value)}
                  />
                  <Button
                    className="ml-2"
                    // Fill the field with a generated 8-char password.
                    onClick={() => field.handleChange(generatePassword(8))}
                  >
                    Random password
                  </Button>
                  <DebugButton data={form.state.values} />
                </div>
                {field.state.meta.errors.length ? (
                  <em>{field.state.meta.errors.join(",")}</em>
                ) : null}
              </div>
            );
          }}
        />
      </form>
      <div>
        {/* Fix: wrap in an arrow so the click event is not passed to
            handleSubmit (it takes optional submit meta, not a DOM event). */}
        <Button onClick={() => form.handleSubmit()}>Save</Button>
      </div>
    </div>
  );
}

View File

@@ -1,4 +1,14 @@
import {Atom, Logs, Minus, Plus, Server, Settings, ShieldCheck, Users, Webhook} from "lucide-react"; import {
Atom,
Logs,
Minus,
Plus,
Server,
Settings,
ShieldCheck,
Users,
Webhook,
} from "lucide-react";
import { import {
SidebarGroup, SidebarGroup,
SidebarGroupContent, SidebarGroupContent,
@@ -10,7 +20,11 @@ import {
SidebarMenuSubButton, SidebarMenuSubButton,
SidebarMenuSubItem, SidebarMenuSubItem,
} from "../../ui/sidebar"; } from "../../ui/sidebar";
import {Collapsible, CollapsibleContent, CollapsibleTrigger} from "../../ui/collapsible"; import {
Collapsible,
CollapsibleContent,
CollapsibleTrigger,
} from "../../ui/collapsible";
const items = [ const items = [
{ {
@@ -53,9 +67,9 @@ const data = {
}, },
{ {
title: "Users", title: "Users",
url: "#", url: "/users",
icon: Users, icon: Users,
isActive: false, isActive: true,
}, },
{ {
title: "UCD", title: "UCD",
@@ -82,7 +96,11 @@ export function AdminSideBar() {
<SidebarGroupContent> <SidebarGroupContent>
<SidebarMenu> <SidebarMenu>
{data.navMain.map((item, index) => ( {data.navMain.map((item, index) => (
<Collapsible key={item.title} defaultOpen={index === 1} className="group/collapsible"> <Collapsible
key={item.title}
defaultOpen={index === 1}
className="group/collapsible"
>
<SidebarMenuItem> <SidebarMenuItem>
<CollapsibleTrigger asChild> <CollapsibleTrigger asChild>
<SidebarMenuButton> <SidebarMenuButton>
@@ -96,15 +114,25 @@ export function AdminSideBar() {
<CollapsibleContent> <CollapsibleContent>
<SidebarMenuSub> <SidebarMenuSub>
{item.items.map((item) => ( {item.items.map((item) => (
<SidebarMenuSubItem key={item.title}> <SidebarMenuSubItem
key={item.title}
>
{item.isActive && ( {item.isActive && (
<SidebarMenuSubButton asChild> <SidebarMenuSubButton
asChild
>
<a <a
href={item.url} href={item.url}
target={item.newWindow ? "_blank" : "_self"} target={
item.newWindow
? "_blank"
: "_self"
}
> >
<item.icon /> <item.icon />
<span>{item.title}</span> <span>
{item.title}
</span>
</a> </a>
</SidebarMenuSubButton> </SidebarMenuSubButton>
)} )}

View File

@@ -0,0 +1,131 @@
import axios from "axios";
import { LstCard } from "../extendedUI/LstCard";
import { Button } from "../ui/button";
import { ScrollArea } from "../ui/scroll-area";
import { Skeleton } from "../ui/skeleton";
import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "../ui/table";
import { toast } from "sonner";
const currentPallets = [
{ key: "line", label: "Line" },
{ key: "runningNr", label: "Running #" },
{ key: "upd_date", label: "Date Scanned" },
{ key: "waitingfor", label: "Waiting For" },
{ key: "clear", label: "Clear" },
];
const currentTags = [
{ key: "line", label: "Line" },
{ key: "printerName", label: "Printer" },
{ key: "runningNr", label: "Running #" },
{ key: "upd_date", label: "Label date" },
{ key: "status", label: "Label Status" },
];
/**
 * Renders `rows` placeholder table rows of `cols` skeleton cells each.
 * The first cell keeps the "font-medium" styling the live data rows use.
 */
function SkeletonRows({ rows, cols }: { rows: number; cols: number }) {
  return (
    <>
      {Array.from({ length: rows }, (_, rowIdx) => (
        <TableRow key={rowIdx}>
          {Array.from({ length: cols }, (_, colIdx) => (
            <TableCell
              key={colIdx}
              className={colIdx === 0 ? "font-medium" : undefined}
            >
              <Skeleton className="h-4" />
            </TableCell>
          ))}
        </TableRow>
      ))}
    </>
  );
}

/**
 * Wrapper status card: two (currently skeleton-only) tables for pallet and
 * tag info, plus manual trigger buttons for the camera and RFID hardware.
 * NOTE(review): table bodies are placeholders — live data is not wired up yet.
 */
export default function WrapperManualTrigger() {
  // Ask the OCME service to fire the camera once; surfaces the outcome as a toast.
  const cameraTrigger = async () => {
    try {
      const res = await axios.get("/ocme/api/v1/manualCameraTrigger");
      if (res.data.success) {
        toast.success(res.data.message);
      } else {
        toast.error(res.data.message);
      }
    } catch (error) {
      // Fix: the failure was previously only logged; tell the operator too.
      console.log(error);
      toast.error("There was an error triggering the camera");
    }
  };

  return (
    <LstCard className="m-2 p-2">
      <ScrollArea className="max-h-[200px]">
        <span>Wrapper Pallet Info</span>
        <Table>
          <TableHeader>
            <TableRow>
              {currentPallets.map((l) => (
                <TableHead key={l.key}>{l.label}</TableHead>
              ))}
            </TableRow>
          </TableHeader>
          <TableBody>
            <SkeletonRows rows={3} cols={currentPallets.length} />
          </TableBody>
        </Table>
      </ScrollArea>
      <ScrollArea className="max-h-[200px]">
        <Table>
          <TableHeader>
            <TableRow>
              {currentTags.map((l) => (
                <TableHead key={l.key}>{l.label}</TableHead>
              ))}
            </TableRow>
          </TableHeader>
          <TableBody>
            <SkeletonRows rows={3} cols={currentTags.length} />
          </TableBody>
        </Table>
      </ScrollArea>
      <div>
        <hr />
        <p className="text-center mb-3">Manual Triggers</p>
        <div className="flex flex-row justify-between">
          <Button onClick={cameraTrigger}>Camera</Button>
          {/* NOTE(review): no handler yet — RFID manual trigger is a stub. */}
          <Button>Rfid</Button>
        </div>
      </div>
    </LstCard>
  );
}

View File

@@ -1,14 +1,27 @@
import {toast} from "sonner"; import { toast } from "sonner";
import {LstCard} from "../extendedUI/LstCard"; import { LstCard } from "../extendedUI/LstCard";
import {Button} from "../ui/button"; import { Button } from "../ui/button";
import {Input} from "../ui/input"; import { Input } from "../ui/input";
import {Table, TableBody, TableCell, TableHead, TableHeader, TableRow} from "../ui/table"; import {
import {Skeleton} from "../ui/skeleton"; Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "../ui/table";
import { Skeleton } from "../ui/skeleton";
//import CycleCountLog from "./CycleCountLog"; //import CycleCountLog from "./CycleCountLog";
import {Select, SelectContent, SelectItem, SelectTrigger, SelectValue} from "../ui/select"; import {
import {Controller, useForm} from "react-hook-form"; Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "../ui/select";
import { Controller, useForm } from "react-hook-form";
import axios from "axios"; import axios from "axios";
import {useState} from "react"; import { useState } from "react";
export default function OcmeCycleCount() { export default function OcmeCycleCount() {
const token = localStorage.getItem("auth_token"); const token = localStorage.getItem("auth_token");
@@ -18,7 +31,7 @@ export default function OcmeCycleCount() {
register, register,
handleSubmit, handleSubmit,
//watch, //watch,
formState: {errors}, formState: { errors },
reset, reset,
control, control,
} = useForm(); } = useForm();
@@ -28,13 +41,22 @@ export default function OcmeCycleCount() {
setCounting(true); setCounting(true);
toast.success(`Cycle count started`); toast.success(`Cycle count started`);
try { try {
const res = await axios.post("/ocme/api/v1/cyclecount", data, { const res = await axios.post("/ocme/api/v1/cycleCount", data, {
headers: {Authorization: `Bearer ${token}`}, headers: { Authorization: `Bearer ${token}` },
}); });
if (res.data.success) {
toast.success(res.data.message); toast.success(res.data.message);
setData(res.data.data); setData(res.data.data);
setCounting(false); setCounting(false);
reset(); reset();
}
if (res.data.success) {
toast.success(res.data.message);
setCounting(false);
}
} catch (error) { } catch (error) {
toast.error("There was an error cycle counting"); toast.error("There was an error cycle counting");
setCounting(false); setCounting(false);
@@ -45,20 +67,25 @@ export default function OcmeCycleCount() {
<div className="flex flex-row w-screen"> <div className="flex flex-row w-screen">
<div className="m-2 w-5/6"> <div className="m-2 w-5/6">
<LstCard> <LstCard>
<p className="ml-2">Please enter the name or laneID you want to cycle count.</p> <p className="ml-2">
Please enter the name or laneID you want to cycle count.
</p>
<div> <div>
<form onSubmit={handleSubmit(onSubmit)}> <form onSubmit={handleSubmit(onSubmit)}>
<div className="flex justify-between"> <div className="flex justify-between">
<div className="m-2 flex flex-row"> <div className="m-2 flex flex-row">
<Input <Input
placeholder="enter lane: L064" placeholder="enter lane: L064"
className={errors.lane ? "border-red-500" : ""} className={
errors.lane ? "border-red-500" : ""
}
aria-invalid={!!errors.lane} aria-invalid={!!errors.lane}
{...register("lane", { {...register("lane", {
required: true, required: true,
minLength: { minLength: {
value: 3, value: 3,
message: "The lane is too short!", message:
"The lane is too short!",
}, },
})} })}
/> />
@@ -68,25 +95,39 @@ export default function OcmeCycleCount() {
name="laneType" name="laneType"
defaultValue={""} defaultValue={""}
render={({ render={({
field: {onChange}, field: { onChange },
fieldState: {}, fieldState: {},
//formState, //formState,
}) => ( }) => (
<Select onValueChange={onChange}> <Select
onValueChange={onChange}
>
<SelectTrigger className="w-[180px]"> <SelectTrigger className="w-[180px]">
<SelectValue placeholder="Select name or id" /> <SelectValue placeholder="Select name or id" />
</SelectTrigger> </SelectTrigger>
<SelectContent> <SelectContent>
<SelectItem value="name">Name</SelectItem> <SelectItem value="name">
<SelectItem value="laneId">Lane ID</SelectItem> Name
</SelectItem>
<SelectItem value="laneId">
Lane ID
</SelectItem>
</SelectContent> </SelectContent>
</Select> </Select>
)} )}
/> />
</div> </div>
</div> </div>
<Button className="m-2" type="submit" disabled={counting}> <Button
{counting ? <span>Counting...</span> : <span>CycleCount</span>} className="m-2"
type="submit"
disabled={counting}
>
{counting ? (
<span>Counting...</span>
) : (
<span>CycleCount</span>
)}
</Button> </Button>
</div> </div>
</form> </form>
@@ -105,7 +146,7 @@ export default function OcmeCycleCount() {
<TableHead>Result</TableHead> <TableHead>Result</TableHead>
</TableRow> </TableRow>
</TableHeader> </TableHeader>
{data.length === 0 ? ( {data?.length === 0 ? (
<TableBody> <TableBody>
{Array(10) {Array(10)
.fill(0) .fill(0)
@@ -142,7 +183,9 @@ export default function OcmeCycleCount() {
<> <>
{data.map((i: any) => { {data.map((i: any) => {
let classname = ``; let classname = ``;
if (i.info === "Quality Check Required") { if (
i.info === "Quality Check Required"
) {
classname = `bg-red-500`; classname = `bg-red-500`;
} }
if (i.info === "Sent to Inv") { if (i.info === "Sent to Inv") {
@@ -150,24 +193,46 @@ export default function OcmeCycleCount() {
} }
return ( return (
<TableRow key={i.runningNumber}> <TableRow key={i.runningNumber}>
<TableCell className={`font-medium ${classname}`}> <TableCell
className={`font-medium ${classname}`}
>
{i.alpla_laneID} {i.alpla_laneID}
</TableCell> </TableCell>
<TableCell className={`font-medium ${classname}`}> <TableCell
className={`font-medium ${classname}`}
>
{i.alpla_laneDescription} {i.alpla_laneDescription}
</TableCell> </TableCell>
<TableCell className={`font-medium ${classname}`}> <TableCell
className={`font-medium ${classname}`}
>
{i.Article} {i.Article}
</TableCell> </TableCell>
<TableCell className={`font-medium ${classname}`}> <TableCell
className={`font-medium ${classname}`}
>
{i.alpla_laneDescription} {i.alpla_laneDescription}
</TableCell> </TableCell>
<TableCell className={`font-medium ${classname}`}> <TableCell
className={`font-medium ${classname}`}
>
{i.runningNumber} {i.runningNumber}
</TableCell> </TableCell>
<TableCell className={`font-medium ${classname}`}>{i.ocme}</TableCell> <TableCell
<TableCell className={`font-medium ${classname}`}>{i.stock}</TableCell> className={`font-medium ${classname}`}
<TableCell className={`font-medium ${classname}`}>{i.info}</TableCell> >
{i.ocme}
</TableCell>
<TableCell
className={`font-medium ${classname}`}
>
{i.stock}
</TableCell>
<TableCell
className={`font-medium ${classname}`}
>
{i.info}
</TableCell>
</TableRow> </TableRow>
); );
})} })}

View File

@@ -1,23 +1,30 @@
import {LstCard} from "@/components/extendedUI/LstCard"; import { LstCard } from "@/components/extendedUI/LstCard";
import {Skeleton} from "@/components/ui/skeleton"; import { Skeleton } from "@/components/ui/skeleton";
import {Table, TableBody, TableCell, TableHead, TableHeader, TableRow} from "@/components/ui/table"; import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "@/components/ui/table";
// import {useSessionStore} from "@/lib/store/sessionStore"; // import {useSessionStore} from "@/lib/store/sessionStore";
// import {useSettingStore} from "@/lib/store/useSettings"; // import {useSettingStore} from "@/lib/store/useSettings";
import {useQuery} from "@tanstack/react-query"; import { useQuery } from "@tanstack/react-query";
import {getlabels} from "@/utils/querys/production/labels"; import { getlabels } from "@/utils/querys/production/labels";
import {format} from "date-fns"; import { format } from "date-fns";
const labelLogs = [ const labelLogs = [
{key: "line", label: "Line"}, { key: "line", label: "Line" },
{key: "printerName", label: "Printer"}, { key: "printerName", label: "Printer" },
{key: "runningNr", label: "Running #"}, { key: "runningNr", label: "Running #" },
{key: "upd_date", label: "Label date"}, { key: "upd_date", label: "Label date" },
{key: "status", label: "Label Status"}, { key: "status", label: "Label Status" },
//{key: "reprint", label: "Reprint"}, // removing the reprint button for now until repritning is working as intended //{key: "reprint", label: "Reprint"}, // removing the reprint button for now until repritning is working as intended
]; ];
export default function LabelLog() { export default function LabelLog() {
const {data, isError, isLoading} = useQuery(getlabels("4")); const { data, isError, isLoading } = useQuery(getlabels("4"));
//const {user} = useSessionStore(); //const {user} = useSessionStore();
//const {settings} = useSettingStore(); //const {settings} = useSettingStore();
//const server = settings.filter((n) => n.name === "server")[0]?.value || ""; //const server = settings.filter((n) => n.name === "server")[0]?.value || "";
@@ -66,7 +73,7 @@ export default function LabelLog() {
</div> </div>
); );
} }
const labelData = data ? data : [];
return ( return (
<LstCard className="m-2 p-2 min-h-2/5"> <LstCard className="m-2 p-2 min-h-2/5">
<p className="text-center">Labels for the last 2 hours</p> <p className="text-center">Labels for the last 2 hours</p>
@@ -106,15 +113,26 @@ export default function LabelLog() {
</> </>
) : ( ) : (
<TableBody> <TableBody>
{data?.map((label: any) => ( {labelData.map((label: any) => (
<TableRow key={label.runningNr}> <TableRow key={label.runningNr}>
<TableCell className="font-medium">{label.line}</TableCell>
<TableCell className="font-medium">{label.printerName}</TableCell>
<TableCell className="font-medium">{label.runningNr}</TableCell>
<TableCell className="font-medium"> <TableCell className="font-medium">
{format(label.upd_date, "M/d/yyyy hh:mm")} {label.line}
</TableCell>
<TableCell className="font-medium">
{label.printerName}
</TableCell>
<TableCell className="font-medium">
{label.runningNr}
</TableCell>
<TableCell className="font-medium">
{format(
label?.upd_date.replace("Z", ""),
"M/d/yyyy hh:mm"
)}
</TableCell>
<TableCell className="font-medium">
{label.status}
</TableCell> </TableCell>
<TableCell className="font-medium">{label.status}</TableCell>
</TableRow> </TableRow>
))} ))}
</TableBody> </TableBody>

View File

@@ -1,14 +1,22 @@
import {LstCard} from "@/components/extendedUI/LstCard"; import { LstCard } from "@/components/extendedUI/LstCard";
import {Skeleton} from "@/components/ui/skeleton"; import { Skeleton } from "@/components/ui/skeleton";
import {Table, TableBody, TableCell, TableHead, TableHeader, TableRow} from "@/components/ui/table"; import {
import {useSessionStore} from "@/lib/store/sessionStore"; Table,
import {useSettingStore} from "@/lib/store/useSettings"; TableBody,
import {LotType} from "@/types/lots"; TableCell,
import {getlots} from "@/utils/querys/production/lots"; TableHead,
import {useQuery} from "@tanstack/react-query"; TableHeader,
TableRow,
} from "@/components/ui/table";
import { useSessionStore } from "@/lib/store/sessionStore";
import { useSettingStore } from "@/lib/store/useSettings";
import { LotType } from "@/types/lots";
import { getlots } from "@/utils/querys/production/lots";
import { useQuery } from "@tanstack/react-query";
import ManualPrint from "./ManualPrinting/ManualPrint"; import ManualPrint from "./ManualPrinting/ManualPrint";
import ManualPrintForm from "./ManualPrinting/ManualPrintForm"; import ManualPrintForm from "./ManualPrinting/ManualPrintForm";
import { ScrollArea } from "@/components/ui/scroll-area";
let lotColumns = [ let lotColumns = [
{ {
@@ -57,13 +65,11 @@ let lotColumns = [
// }, // },
]; ];
export default function Lots() { export default function Lots() {
const {data, isError, isLoading} = useQuery(getlots()); const { data, isError, isLoading } = useQuery(getlots());
const {user} = useSessionStore(); const { user } = useSessionStore();
const {settings} = useSettingStore(); const { settings } = useSettingStore();
const server = settings.filter((n) => n.name === "server")[0]?.value || ""; const server = settings.filter((n) => n.name === "server")[0]?.value || "";
console.log(server);
const roles = ["admin", "manager", "operator"]; const roles = ["admin", "manager", "operator"];
if (user && roles.includes(user.role)) { if (user && roles.includes(user.role)) {
@@ -83,13 +89,16 @@ export default function Lots() {
if (isError) { if (isError) {
return ( return (
<div className="m-2 p-2 min-h-2/5"> <div className="m-2 p-2 min-h-2/5">
<ScrollArea className="max-h-1/2 rounded-md border p-4">
<LstCard> <LstCard>
<p className="text-center">Current Assigned lots</p> <p className="text-center">Current Assigned lots</p>
<Table> <Table>
<TableHeader> <TableHeader>
<TableRow> <TableRow>
{lotColumns.map((l) => ( {lotColumns.map((l) => (
<TableHead key={l.key}>{l.label}</TableHead> <TableHead key={l.key}>
{l.label}
</TableHead>
))} ))}
</TableRow> </TableRow>
</TableHeader> </TableHeader>
@@ -131,12 +140,14 @@ export default function Lots() {
</TableBody> </TableBody>
</Table> </Table>
</LstCard> </LstCard>
</ScrollArea>
</div> </div>
); );
} }
return ( return (
<LstCard className="m-2 p-2 min-h-2/5"> <LstCard className="m-2 p-2 min-h-2/5">
<ScrollArea className="h-[400px]">
<p className="text-center">Current Assigned lots</p> <p className="text-center">Current Assigned lots</p>
<Table> <Table>
<TableHeader> <TableHeader>
@@ -188,21 +199,40 @@ export default function Lots() {
<TableBody> <TableBody>
{data?.map((lot: LotType) => ( {data?.map((lot: LotType) => (
<TableRow key={lot.LabelOnlineID}> <TableRow key={lot.LabelOnlineID}>
<TableCell className="font-medium">{lot.MachineLocation}</TableCell> <TableCell className="font-medium">
<TableCell className="font-medium">{lot.AV}</TableCell> {lot.MachineLocation}
<TableCell className="font-medium">{lot.Alias}</TableCell> </TableCell>
<TableCell className="font-medium">{lot.LOT}</TableCell> <TableCell className="font-medium">
<TableCell className="font-medium">{lot.ProlinkLot}</TableCell> {lot.AV}
<TableCell className="font-medium">{lot.PlannedQTY}</TableCell> </TableCell>
<TableCell className="font-medium">{lot.Produced}</TableCell> <TableCell className="font-medium">
<TableCell className="font-medium">{lot.Remaining}</TableCell> {lot.Alias}
<TableCell className="font-medium">{lot.overPrinting}</TableCell> </TableCell>
<TableCell className="font-medium">
{lot.LOT}
</TableCell>
<TableCell className="font-medium">
{lot.ProlinkLot}
</TableCell>
<TableCell className="font-medium">
{lot.PlannedQTY}
</TableCell>
<TableCell className="font-medium">
{lot.Produced}
</TableCell>
<TableCell className="font-medium">
{lot.Remaining}
</TableCell>
<TableCell className="font-medium">
{lot.overPrinting}
</TableCell>
{user && roles.includes(user.role) && ( {user && roles.includes(user.role) && (
<> <>
{server === "usday1vms006" || server === "localhost" ? ( {server === "usday1vms006" ||
server === "localhost" ? (
<> <>
<TableCell className="flex justify-center"> <TableCell className="flex justify-center">
<ManualPrintForm lot={lot} /> <ManualPrintForm />
</TableCell> </TableCell>
</> </>
) : ( ) : (
@@ -217,6 +247,7 @@ export default function Lots() {
</TableBody> </TableBody>
)} )}
</Table> </Table>
</ScrollArea>
</LstCard> </LstCard>
); );
} }

View File

@@ -1,4 +1,4 @@
import {Button} from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { import {
Dialog, Dialog,
DialogContent, DialogContent,
@@ -8,8 +8,8 @@ import {
DialogTitle, DialogTitle,
DialogTrigger, DialogTrigger,
} from "@/components/ui/dialog"; } from "@/components/ui/dialog";
import {Input} from "@/components/ui/input"; import { Input } from "@/components/ui/input";
import {Label} from "@/components/ui/label"; import { Label } from "@/components/ui/label";
import { import {
Select, Select,
SelectContent, SelectContent,
@@ -19,81 +19,81 @@ import {
SelectTrigger, SelectTrigger,
SelectValue, SelectValue,
} from "@/components/ui/select"; } from "@/components/ui/select";
import {Textarea} from "@/components/ui/textarea"; import { Textarea } from "@/components/ui/textarea";
import {useSessionStore} from "@/lib/store/sessionStore"; import { useSettingStore } from "@/lib/store/useSettings";
import {useSettingStore} from "@/lib/store/useSettings";
import {LotType} from "@/types/lots";
import axios from "axios"; import axios from "axios";
import {Tag} from "lucide-react"; import { Tag } from "lucide-react";
import {useState} from "react"; import { useState } from "react";
import {Controller, useForm} from "react-hook-form"; import { Controller, useForm } from "react-hook-form";
import {toast} from "sonner"; import { toast } from "sonner";
import {manualPrintLabels} from "./ManualPrintLabel";
const printReason = [ const printReason = [
{key: "printerIssue", label: "Printer Related"}, { key: "printerIssue", label: "Printer Related" },
{key: "strapper", label: "Strapper Error"}, { key: "missingRfidTag", label: "Missing or incorrect tag" },
{key: "manualCheck", label: "20th pallet check"}, { key: "strapper", label: "Strapper Error" },
{key: "outOfSync", label: "Labeler Out of Sync"}, { key: "manualCheck", label: "20th pallet check" },
{ key: "outOfSync", label: "Labeler Out of Sync" },
]; ];
export default function ManualPrintForm({lot}: {lot: LotType}) { export default function ManualPrintForm() {
const {user} = useSessionStore();
const token = localStorage.getItem("auth_token"); const token = localStorage.getItem("auth_token");
const {settings} = useSettingStore(); const { settings } = useSettingStore();
const [open, setOpen] = useState(false); const [open, setOpen] = useState(false);
const server = settings.filter((n) => n.name === "server")[0]?.value; const server = settings.filter((n) => n.name === "server")[0]?.value;
// const serverPort = settings.filter((n) => n.name === "serverPort")[0]?.value; // const serverPort = settings.filter((n) => n.name === "serverPort")[0]?.value;
// const serverUrl = `http://${server}:${serverPort}`; // const serverUrl = `http://${server}:${serverPort}`;
// what is the dyco set to? rfid or dyco
const dyco = settings.filter((n) => n.name === "dycoPrint");
const { const {
register, register,
handleSubmit, handleSubmit,
//watch, //watch,
formState: {errors}, formState: { errors },
reset, reset,
control, control,
} = useForm(); } = useForm();
const handlePrintLabel = async (lot: LotType) => { const handleManualPrintLog = async (logData: any) => {
//console.log(lot);
const labels: any = await manualPrintLabels(lot, user);
if (labels.success) {
toast.success(labels.message);
} else {
toast.error(labels.message);
}
};
const handleManualPrintLog = async (logData: any, lot: LotType) => {
// toast.success(`A new label was sent to printer: ${lot.PrinterName} for line ${lot.MachineDescription} `); // toast.success(`A new label was sent to printer: ${lot.PrinterName} for line ${lot.MachineDescription} `);
const logdataUrl = `/api/ocp/manualLabelLog`; const logdataUrl = `/api/ocp/manuallabellog`;
axios axios
.post(logdataUrl, logData, {headers: {Authorization: `Bearer ${token}`}}) .post(logdataUrl, logData, {
headers: { Authorization: `Bearer ${token}` },
})
.then((d) => { .then((d) => {
//console.log(d); console.log(d);
if (d.data.success) {
toast.success(d.data.message); toast.success(d.data.message);
handlePrintLabel(lot); } else {
toast.error(d.data.message);
}
reset(); reset();
setOpen(false);
}) })
.catch((e) => { .catch((e) => {
if (e.response.status === 500) { if (e.response.status === 500) {
toast.error(`Internal Server error please try again.`); toast.error(`Internal Server error please try again.`);
return {sucess: false}; return { sucess: false };
} }
if (e.response.status === 401) { if (e.response.status === 401) {
//console.log(e.response); //console.log(e.response);
toast.error(`You are not authorized to do this.`); toast.error(`You are not authorized to do this.`);
return {sucess: false}; return { sucess: false };
} }
}); });
}; };
const onSubmit = (data: any) => { const onSubmit = (data: any) => {
console.log(data); //console.log(data);
handleManualPrintLog(data, lot); handleManualPrintLog(data);
};
const closeForm = () => {
reset();
setOpen(false);
}; };
return ( return (
<Dialog <Dialog
@@ -117,12 +117,14 @@ export default function ManualPrintForm({lot}: {lot: LotType}) {
<DialogHeader> <DialogHeader>
<DialogTitle>Edit profile</DialogTitle> <DialogTitle>Edit profile</DialogTitle>
<DialogDescription> <DialogDescription>
Make changes to your profile here. Click save when you're done. Make changes to your profile here. Click save when
you're done.
</DialogDescription> </DialogDescription>
</DialogHeader> </DialogHeader>
<form onSubmit={handleSubmit(onSubmit)}> <form onSubmit={handleSubmit(onSubmit)}>
<p> <p>
To manually print a label you must complete all the required fields below. To manually print a label you must complete all the
required fields below.
<br /> <br />
If you clicked this in error just click close If you clicked this in error just click close
</p> </p>
@@ -133,7 +135,7 @@ export default function ManualPrintForm({lot}: {lot: LotType}) {
name="printReason" name="printReason"
defaultValue={""} defaultValue={""}
render={({ render={({
field: {onChange}, field: { onChange },
fieldState: {}, fieldState: {},
//formState, //formState,
}) => ( }) => (
@@ -143,35 +145,46 @@ export default function ManualPrintForm({lot}: {lot: LotType}) {
</SelectTrigger> </SelectTrigger>
<SelectContent> <SelectContent>
<SelectGroup> <SelectGroup>
<SelectLabel>Print Reasons</SelectLabel> <SelectLabel>
{printReason.map((printReason: any) => ( Print Reasons
<SelectItem value={printReason.key}>{printReason.label}</SelectItem> </SelectLabel>
))} {printReason.map(
(printReason: any) => (
<SelectItem
value={printReason.key}
>
{printReason.label}
</SelectItem>
)
)}
</SelectGroup> </SelectGroup>
</SelectContent> </SelectContent>
</Select> </Select>
)} )}
/> />
) : ( ) : (
<div> <div className="m-2">
<Label htmlFor="printRason" className="m-1"> <Label htmlFor="printRason" className="m-1">
Why are you manually printing? Why are you manually printing?
</Label> </Label>
<Input <Input
type="text" type="text"
className={errors.printReason ? "border-red-500" : ""} className={
errors.printReason ? "border-red-500" : ""
}
aria-invalid={!!errors.printReason} aria-invalid={!!errors.printReason}
{...register("printReason", { {...register("printReason", {
required: true, required: true,
minLength: { minLength: {
value: 5, value: 5,
message: "To short of a reason please try again!", message:
"To short of a reason please try again!",
}, },
})} })}
/> />
</div> </div>
)} )}
<div> <div className="m-2">
<Label htmlFor="line" className="m-1"> <Label htmlFor="line" className="m-1">
"What is the line number you are printing?" "What is the line number you are printing?"
</Label> </Label>
@@ -180,11 +193,11 @@ export default function ManualPrintForm({lot}: {lot: LotType}) {
type="number" type="number"
className={errors.line ? "border-red-500" : ""} className={errors.line ? "border-red-500" : ""}
aria-invalid={!!errors.line} aria-invalid={!!errors.line}
{...register("line", {required: true})} {...register("line", { required: true })}
/> />
</div> </div>
<div> <div className="m-2">
<Label htmlFor="initials" className="m-1"> <Label htmlFor="initials" className="m-1">
Enter intials Enter intials
</Label> </Label>
@@ -192,23 +205,55 @@ export default function ManualPrintForm({lot}: {lot: LotType}) {
//variant="underlined" //variant="underlined"
//label="Enter intials" //label="Enter intials"
{...register("initials", {required: true})} {...register("initials", { required: true })}
/> />
</div> </div>
<hr />
{dyco[0].value === "0" && (
<div>
<p>Enter the missing tag number.</p>
<hr />
<Label htmlFor="rfidTag" className="m-1">
Enter the tag number only Example
ALPLA000002541. only enter 2541
</Label>
<Input
type="text"
className={
errors.printReason ? "border-red-500" : ""
}
aria-invalid={!!errors.printReason}
{...register("rfidTag", {
required: true,
minLength: {
value: 1,
message: "Tag number is to short!",
},
})}
/>
</div>
)}
<div className="m-2">
<Textarea <Textarea
//label="Comments" //label="Comments"
placeholder="add more info as needed." placeholder="add more info as needed."
{...register("additionalComments")} {...register("additionalComments")}
/> />
</div>
<DialogFooter> <DialogFooter>
<Button color="danger" variant="default" onClick={() => setOpen(!open)}> <div className="mt-3">
<Button
color="danger"
variant="default"
onClick={closeForm}
>
Close Close
</Button> </Button>
<Button color="primary" type="submit"> <Button color="primary" type="submit">
Print Print
</Button> </Button>
</div>
</DialogFooter> </DialogFooter>
</form> </form>
</DialogContent> </DialogContent>

View File

@@ -1,15 +1,15 @@
import {LotType} from "@/types/lots"; import { LotType } from "@/types/lots";
import axios from "axios"; import axios from "axios";
export const manualPrintLabels = async (lot: LotType, user: any) => { export const manualPrintLabels = async (lot: LotType, user: any) => {
//console.log(lot); //console.log(lot);
const labelUrl = `/ocp/manualPrintAndFollow`; const labelUrl = `/api/ocp/manualprintandfollow`;
try { try {
const res = await axios.post( const res = await axios.post(
labelUrl, labelUrl,
{line: lot.MachineLocation, printerName: lot.PrinterName}, { line: lot.MachineLocation, printerName: lot.PrinterName },
{headers: {Authorization: `Basic ${user?.prod}`}} { headers: { Authorization: `Basic ${user?.prod}` } }
); );
if (res.data.success) { if (res.data.success) {
@@ -19,7 +19,7 @@ export const manualPrintLabels = async (lot: LotType, user: any) => {
}; };
} else { } else {
return { return {
success: true, success: false,
message: `Line ${lot.MachineDescription} encountered an error printing labels: ${res.data.message}`, message: `Line ${lot.MachineDescription} encountered an error printing labels: ${res.data.message}`,
}; };
} }

View File

@@ -1,5 +1,154 @@
import {LstCard} from "@/components/extendedUI/LstCard"; import { LstCard } from "@/components/extendedUI/LstCard";
import { Button } from "@/components/ui/button";
import { Skeleton } from "@/components/ui/skeleton";
import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "@/components/ui/table";
import { getOcpLogs } from "@/utils/querys/production/ocpLogs";
import { useQuery } from "@tanstack/react-query";
import axios from "axios";
import { format } from "date-fns";
import { Trash } from "lucide-react";
import { toast } from "sonner";
const labelLogs = [
{ key: "message", label: "Error Message" },
{ key: "created_at", label: "ErrorDat" },
{ key: "clear", label: "Clear" },
//{key: "reprint", label: "Reprint"}, // removing the reprint button for now until repritning is working as intended
];
export default function OcpLogs() { export default function OcpLogs() {
return <LstCard className="m-2 p-2">Ocp Logs</LstCard>; const { data, isError, isLoading } = useQuery(getOcpLogs("4"));
const clearLog = async (log: any) => {
try {
const res = await axios.patch(`/api/logger/logs/${log.log_id}`);
if (res.data.success) {
toast.success(`Log message: ${log.message}, was just cleared`);
} else {
console.log(res);
toast.error(`There was an error clearing the message.`);
}
} catch (error) {
toast.error(`There was an error trying to clearing the message.`);
}
};
const logData = data ? data : [];
if (isError) {
return (
<div className="m-2 p-2 min-h-2/5">
<LstCard>
<p className="text-center">Labels for the last 2 hours</p>
<Table>
<TableHeader>
<TableRow>
{labelLogs.map((l) => (
<TableHead key={l.key}>{l.label}</TableHead>
))}
</TableRow>
</TableHeader>
<TableBody>
{Array(7)
.fill(0)
.map((_, i) => (
<TableRow key={i}>
<TableCell className="font-medium">
<Skeleton className="h-4" />
</TableCell>
<TableCell>
<Skeleton className="h-4" />
</TableCell>
<TableCell>
<Skeleton className="h-4" />
</TableCell>
<TableCell>
<Skeleton className="h-4" />
</TableCell>
<TableCell>
<Skeleton className="h-4" />
</TableCell>
</TableRow>
))}
</TableBody>
</Table>
</LstCard>
</div>
);
}
return (
<LstCard className="m-2 p-2 min-h-2/5">
<p className="text-center">Labels for the last 2 hours</p>
<Table>
<TableHeader>
<TableRow>
{labelLogs.map((l) => (
<TableHead key={l.key}>{l.label}</TableHead>
))}
</TableRow>
</TableHeader>
{isLoading ? (
<>
<TableBody>
{Array(7)
.fill(0)
.map((_, i) => (
<TableRow key={i}>
<TableCell className="font-medium">
<Skeleton className="h-4" />
</TableCell>
<TableCell>
<Skeleton className="h-4" />
</TableCell>
<TableCell>
<Skeleton className="h-4" />
</TableCell>
<TableCell>
<Skeleton className="h-4" />
</TableCell>
<TableCell>
<Skeleton className="h-4" />
</TableCell>
</TableRow>
))}
</TableBody>
</>
) : (
<TableBody>
{logData.map((label: any) => (
<TableRow key={label.log_id}>
<TableCell className="font-medium max-w-5/6">
<p className="text-balance">
{label.message}
</p>
</TableCell>
<TableCell className="font-medium">
{format(
label?.created_at.replace("Z", ""),
"M/d/yyyy hh:mm"
)}
</TableCell>
<TableCell className="font-medium">
<Button
size="icon"
onClick={() => clearLog(label)}
>
<Trash />
</Button>
</TableCell>
</TableRow>
))}
</TableBody>
)}
</Table>
</LstCard>
);
} }

View File

@@ -1,30 +1,55 @@
import WrapperManualTrigger from "@/components/ocme/WrapperCard";
import LabelLog from "./LabelLog"; import LabelLog from "./LabelLog";
import Lots from "./Lots"; import Lots from "./Lots";
import OcpLogs from "./OcpLogs"; import OcpLogs from "./OcpLogs";
import PrinterStatus from "./PrinterStatus"; import PrinterStatus from "./PrinterStatus";
import { useSettingStore } from "@/lib/store/useSettings";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
export default function OCPPage() { export default function OCPPage() {
const { settings } = useSettingStore();
const server = settings.filter((n) => n.plantToken === "usday1");
return ( return (
<div className="h-dvh w-full overflow-hidden"> <div className="h-screen w-full ">
<div className="flex flex-wrap gap-2"> <div className="flex flex-wrap gap-2">
<div className="flex flex-col w-4/5 h-dvh"> <div className="flex flex-col w-4/5 h-dvh">
<div className=""> <div className="">
<Lots /> <Lots />
</div> </div>
<div className="flex flex-row"> <div className="w-5/6 h-1/2">
<div className="w-1/2"> <Tabs defaultValue="ocplogs" className="w-full">
<LabelLog /> <TabsList className="grid w-full grid-cols-2">
</div> <TabsTrigger value="ocplogs">
<div className="w-1/2"> OcpLogs
</TabsTrigger>
<TabsTrigger value="labels">Labels</TabsTrigger>
</TabsList>
<TabsContent value="ocplogs">
<div className="w-full">
<OcpLogs /> <OcpLogs />
</div> </div>
</TabsContent>
<TabsContent value="labels">
<div className="w-full">
<LabelLog />
</div>
</TabsContent>
</Tabs>
</div> </div>
</div> </div>
<div className="w-1/6"> <div className="w-1/6 flex flex-col">
{server && (
<div>
<WrapperManualTrigger />
</div>
)}
<div>
<PrinterStatus /> <PrinterStatus />
</div> </div>
</div> </div>
</div> </div>
</div>
); );
} }

View File

@@ -1,6 +1,14 @@
import {LstCard} from "@/components/extendedUI/LstCard"; import { LstCard } from "@/components/extendedUI/LstCard";
import {Skeleton} from "@/components/ui/skeleton"; import { ScrollArea } from "@/components/ui/scroll-area";
import {Table, TableBody, TableCell, TableHead, TableHeader, TableRow} from "@/components/ui/table"; import { Skeleton } from "@/components/ui/skeleton";
import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "@/components/ui/table";
let printerCols = [ let printerCols = [
{ {
@@ -19,7 +27,9 @@ let printerCols = [
export default function PrinterStatus() { export default function PrinterStatus() {
return ( return (
<LstCard className="m-2 p-2"> <LstCard className="m-2 p-2">
<ScrollArea className="max-h-[300px]">
<p className="text-center">Printer Status</p> <p className="text-center">Printer Status</p>
<Table> <Table>
<TableHeader> <TableHeader>
<TableRow> <TableRow>
@@ -30,7 +40,7 @@ export default function PrinterStatus() {
</TableHeader> </TableHeader>
<TableBody> <TableBody>
{Array(10) {Array(5)
.fill(0) .fill(0)
.map((_, i) => ( .map((_, i) => (
<TableRow key={i}> <TableRow key={i}>
@@ -47,6 +57,7 @@ export default function PrinterStatus() {
))} ))}
</TableBody> </TableBody>
</Table> </Table>
</ScrollArea>
</LstCard> </LstCard>
); );
} }

View File

@@ -0,0 +1,64 @@
import * as React from "react"
import * as AccordionPrimitive from "@radix-ui/react-accordion"
import { ChevronDownIcon } from "lucide-react"
import { cn } from "@/lib/utils"
function Accordion({
...props
}: React.ComponentProps<typeof AccordionPrimitive.Root>) {
return <AccordionPrimitive.Root data-slot="accordion" {...props} />
}
function AccordionItem({
className,
...props
}: React.ComponentProps<typeof AccordionPrimitive.Item>) {
return (
<AccordionPrimitive.Item
data-slot="accordion-item"
className={cn("border-b last:border-b-0", className)}
{...props}
/>
)
}
function AccordionTrigger({
className,
children,
...props
}: React.ComponentProps<typeof AccordionPrimitive.Trigger>) {
return (
<AccordionPrimitive.Header className="flex">
<AccordionPrimitive.Trigger
data-slot="accordion-trigger"
className={cn(
"focus-visible:border-ring focus-visible:ring-ring/50 flex flex-1 items-start justify-between gap-4 rounded-md py-4 text-left text-sm font-medium transition-all outline-none hover:underline focus-visible:ring-[3px] disabled:pointer-events-none disabled:opacity-50 [&[data-state=open]>svg]:rotate-180",
className
)}
{...props}
>
{children}
<ChevronDownIcon className="text-muted-foreground pointer-events-none size-4 shrink-0 translate-y-0.5 transition-transform duration-200" />
</AccordionPrimitive.Trigger>
</AccordionPrimitive.Header>
)
}
function AccordionContent({
className,
children,
...props
}: React.ComponentProps<typeof AccordionPrimitive.Content>) {
return (
<AccordionPrimitive.Content
data-slot="accordion-content"
className="data-[state=closed]:animate-accordion-up data-[state=open]:animate-accordion-down overflow-hidden text-sm"
{...props}
>
<div className={cn("pt-0 pb-4", className)}>{children}</div>
</AccordionPrimitive.Content>
)
}
export { Accordion, AccordionItem, AccordionTrigger, AccordionContent }

View File

@@ -0,0 +1,45 @@
"use client";
import { GripVertical } from "lucide-react";
import * as ResizablePrimitive from "react-resizable-panels";
import { cn } from "@/lib/utils";
const ResizablePanelGroup = ({
className,
...props
}: React.ComponentProps<typeof ResizablePrimitive.PanelGroup>) => (
<ResizablePrimitive.PanelGroup
className={cn(
"flex h-full w-full data-[panel-group-direction=vertical]:flex-col",
className
)}
{...props}
/>
);
const ResizablePanel = ResizablePrimitive.Panel;
const ResizableHandle = ({
withHandle,
className,
...props
}: React.ComponentProps<typeof ResizablePrimitive.PanelResizeHandle> & {
withHandle?: boolean;
}) => (
<ResizablePrimitive.PanelResizeHandle
className={cn(
"relative flex w-px items-center justify-center bg-border after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring focus-visible:ring-offset-1 data-[panel-group-direction=vertical]:h-px data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:-translate-y-1/2 data-[panel-group-direction=vertical]:after:translate-x-0 [&[data-panel-group-direction=vertical]>div]:rotate-90",
className
)}
{...props}
>
{withHandle && (
<div className="z-10 flex h-4 w-3 items-center justify-center rounded-sm border bg-border">
<GripVertical className="h-2.5 w-2.5" />
</div>
)}
</ResizablePrimitive.PanelResizeHandle>
);
export { ResizablePanelGroup, ResizablePanel, ResizableHandle };

View File

@@ -0,0 +1,56 @@
import * as React from "react"
import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area"
import { cn } from "@/lib/utils"
/**
 * Scrollable container built on Radix ScrollArea. Children are placed in a
 * styled viewport; a default vertical scrollbar and the corner element are
 * attached automatically. Extra props go to the Radix root.
 */
function ScrollArea({
  className,
  children,
  ...props
}: React.ComponentProps<typeof ScrollAreaPrimitive.Root>) {
  const viewportClasses =
    "ring-ring/10 dark:ring-ring/20 dark:outline-ring/40 outline-ring/50 size-full rounded-[inherit] transition-[color,box-shadow] focus-visible:ring-4 focus-visible:outline-1";
  return (
    <ScrollAreaPrimitive.Root
      data-slot="scroll-area"
      className={cn("relative", className)}
      {...props}
    >
      <ScrollAreaPrimitive.Viewport
        data-slot="scroll-area-viewport"
        className={viewportClasses}
      >
        {children}
      </ScrollAreaPrimitive.Viewport>
      <ScrollBar />
      <ScrollAreaPrimitive.Corner />
    </ScrollAreaPrimitive.Root>
  );
}
/**
 * Scrollbar for ScrollArea. `orientation` selects a vertical (default) or
 * horizontal track; the thumb stretches to fill the remaining space. Extra
 * props pass through to the Radix scrollbar primitive.
 */
function ScrollBar({
  className,
  orientation = "vertical",
  ...props
}: React.ComponentProps<typeof ScrollAreaPrimitive.ScrollAreaScrollbar>) {
  const orientationClasses =
    orientation === "vertical"
      ? "h-full w-2.5 border-l border-l-transparent"
      : "h-2.5 flex-col border-t border-t-transparent";
  return (
    <ScrollAreaPrimitive.ScrollAreaScrollbar
      data-slot="scroll-area-scrollbar"
      orientation={orientation}
      className={cn(
        "flex touch-none p-px transition-colors select-none",
        orientationClasses,
        className
      )}
      {...props}
    >
      <ScrollAreaPrimitive.ScrollAreaThumb
        data-slot="scroll-area-thumb"
        className="bg-border relative flex-1 rounded-full"
      />
    </ScrollAreaPrimitive.ScrollAreaScrollbar>
  );
}
export { ScrollArea, ScrollBar }

View File

@@ -20,6 +20,7 @@ import { Route as IndexImport } from './routes/index'
import { Route as OcpIndexImport } from './routes/ocp/index' import { Route as OcpIndexImport } from './routes/ocp/index'
import { Route as EomEomImport } from './routes/_eom/eom' import { Route as EomEomImport } from './routes/_eom/eom'
import { Route as AuthProfileImport } from './routes/_auth/profile' import { Route as AuthProfileImport } from './routes/_auth/profile'
import { Route as AdminUsersImport } from './routes/_admin/users'
import { Route as AdminSettingsImport } from './routes/_admin/settings' import { Route as AdminSettingsImport } from './routes/_admin/settings'
import { Route as AdminServersImport } from './routes/_admin/servers' import { Route as AdminServersImport } from './routes/_admin/servers'
import { Route as AdminModulesImport } from './routes/_admin/modules' import { Route as AdminModulesImport } from './routes/_admin/modules'
@@ -82,6 +83,12 @@ const AuthProfileRoute = AuthProfileImport.update({
getParentRoute: () => AuthRoute, getParentRoute: () => AuthRoute,
} as any) } as any)
const AdminUsersRoute = AdminUsersImport.update({
id: '/users',
path: '/users',
getParentRoute: () => AdminRoute,
} as any)
const AdminSettingsRoute = AdminSettingsImport.update({ const AdminSettingsRoute = AdminSettingsImport.update({
id: '/settings', id: '/settings',
path: '/settings', path: '/settings',
@@ -200,6 +207,13 @@ declare module '@tanstack/react-router' {
preLoaderRoute: typeof AdminSettingsImport preLoaderRoute: typeof AdminSettingsImport
parentRoute: typeof AdminImport parentRoute: typeof AdminImport
} }
'/_admin/users': {
id: '/_admin/users'
path: '/users'
fullPath: '/users'
preLoaderRoute: typeof AdminUsersImport
parentRoute: typeof AdminImport
}
'/_auth/profile': { '/_auth/profile': {
id: '/_auth/profile' id: '/_auth/profile'
path: '/profile' path: '/profile'
@@ -265,12 +279,14 @@ interface AdminRouteChildren {
AdminModulesRoute: typeof AdminModulesRoute AdminModulesRoute: typeof AdminModulesRoute
AdminServersRoute: typeof AdminServersRoute AdminServersRoute: typeof AdminServersRoute
AdminSettingsRoute: typeof AdminSettingsRoute AdminSettingsRoute: typeof AdminSettingsRoute
AdminUsersRoute: typeof AdminUsersRoute
} }
const AdminRouteChildren: AdminRouteChildren = { const AdminRouteChildren: AdminRouteChildren = {
AdminModulesRoute: AdminModulesRoute, AdminModulesRoute: AdminModulesRoute,
AdminServersRoute: AdminServersRoute, AdminServersRoute: AdminServersRoute,
AdminSettingsRoute: AdminSettingsRoute, AdminSettingsRoute: AdminSettingsRoute,
AdminUsersRoute: AdminUsersRoute,
} }
const AdminRouteWithChildren = AdminRoute._addFileChildren(AdminRouteChildren) const AdminRouteWithChildren = AdminRoute._addFileChildren(AdminRouteChildren)
@@ -305,6 +321,7 @@ export interface FileRoutesByFullPath {
'/modules': typeof AdminModulesRoute '/modules': typeof AdminModulesRoute
'/servers': typeof AdminServersRoute '/servers': typeof AdminServersRoute
'/settings': typeof AdminSettingsRoute '/settings': typeof AdminSettingsRoute
'/users': typeof AdminUsersRoute
'/profile': typeof AuthProfileRoute '/profile': typeof AuthProfileRoute
'/eom': typeof EomEomRoute '/eom': typeof EomEomRoute
'/ocp': typeof OcpIndexRoute '/ocp': typeof OcpIndexRoute
@@ -323,6 +340,7 @@ export interface FileRoutesByTo {
'/modules': typeof AdminModulesRoute '/modules': typeof AdminModulesRoute
'/servers': typeof AdminServersRoute '/servers': typeof AdminServersRoute
'/settings': typeof AdminSettingsRoute '/settings': typeof AdminSettingsRoute
'/users': typeof AdminUsersRoute
'/profile': typeof AuthProfileRoute '/profile': typeof AuthProfileRoute
'/eom': typeof EomEomRoute '/eom': typeof EomEomRoute
'/ocp': typeof OcpIndexRoute '/ocp': typeof OcpIndexRoute
@@ -344,6 +362,7 @@ export interface FileRoutesById {
'/_admin/modules': typeof AdminModulesRoute '/_admin/modules': typeof AdminModulesRoute
'/_admin/servers': typeof AdminServersRoute '/_admin/servers': typeof AdminServersRoute
'/_admin/settings': typeof AdminSettingsRoute '/_admin/settings': typeof AdminSettingsRoute
'/_admin/users': typeof AdminUsersRoute
'/_auth/profile': typeof AuthProfileRoute '/_auth/profile': typeof AuthProfileRoute
'/_eom/eom': typeof EomEomRoute '/_eom/eom': typeof EomEomRoute
'/ocp/': typeof OcpIndexRoute '/ocp/': typeof OcpIndexRoute
@@ -364,6 +383,7 @@ export interface FileRouteTypes {
| '/modules' | '/modules'
| '/servers' | '/servers'
| '/settings' | '/settings'
| '/users'
| '/profile' | '/profile'
| '/eom' | '/eom'
| '/ocp' | '/ocp'
@@ -381,6 +401,7 @@ export interface FileRouteTypes {
| '/modules' | '/modules'
| '/servers' | '/servers'
| '/settings' | '/settings'
| '/users'
| '/profile' | '/profile'
| '/eom' | '/eom'
| '/ocp' | '/ocp'
@@ -400,6 +421,7 @@ export interface FileRouteTypes {
| '/_admin/modules' | '/_admin/modules'
| '/_admin/servers' | '/_admin/servers'
| '/_admin/settings' | '/_admin/settings'
| '/_admin/users'
| '/_auth/profile' | '/_auth/profile'
| '/_eom/eom' | '/_eom/eom'
| '/ocp/' | '/ocp/'
@@ -472,7 +494,8 @@ export const routeTree = rootRoute
"children": [ "children": [
"/_admin/modules", "/_admin/modules",
"/_admin/servers", "/_admin/servers",
"/_admin/settings" "/_admin/settings",
"/_admin/users"
] ]
}, },
"/_auth": { "/_auth": {
@@ -506,6 +529,10 @@ export const routeTree = rootRoute
"filePath": "_admin/settings.tsx", "filePath": "_admin/settings.tsx",
"parent": "/_admin" "parent": "/_admin"
}, },
"/_admin/users": {
"filePath": "_admin/users.tsx",
"parent": "/_admin"
},
"/_auth/profile": { "/_auth/profile": {
"filePath": "_auth/profile.tsx", "filePath": "_auth/profile.tsx",
"parent": "/_auth" "parent": "/_auth"

View File

@@ -0,0 +1,10 @@
import UserPage from "@/components/admin/user/UserPage";
import { createFileRoute } from "@tanstack/react-router";
// TanStack Router file route for /_admin/users (nested under the _admin
// layout); renders the admin user-management page.
export const Route = createFileRoute("/_admin/users")({
  component: RouteComponent,
});
// Declared as a function statement so it is hoisted above the Route
// definition that references it.
function RouteComponent() {
  return <UserPage />;
}

View File

@@ -0,0 +1,5 @@
import { Button } from "@/components/ui/button";
/**
 * Dev-only helper button: logs the wrapped `data` payload to the console when
 * clicked so the current object can be inspected in devtools.
 *
 * @param data - arbitrary payload to dump; logged as-is.
 */
export const DebugButton = ({ data }: { data?: unknown }) => {
  // `unknown` instead of `any` keeps type checking on while still accepting
  // any payload the caller wants to inspect.
  return <Button onClick={() => console.log(data)}>Debug</Button>;
};

View File

@@ -0,0 +1,14 @@
import { formOptions } from "@tanstack/react-form";
/** Minimal shape of the user record the edit form reads; extra fields are ignored. */
interface UserFormSource {
  username: string;
  email: string;
  role: string;
}

/**
 * Builds the TanStack Form options for the admin user-edit form.
 * The password default is always the empty string — the stored credential is
 * never prefilled into the form.
 *
 * @param user - user record providing the current field values.
 * @returns form options with defaultValues seeded from `user`.
 */
export const userFormOptions = (user: UserFormSource) => {
  return formOptions({
    defaultValues: {
      username: user.username,
      password: "",
      email: user.email,
      role: user.role,
    },
  });
};

View File

@@ -0,0 +1,27 @@
/**
 * Generates a random password of the requested length containing at least one
 * uppercase letter, one lowercase letter, one digit and one symbol.
 *
 * NOTE(review): Math.random() is not cryptographically secure; for production
 * credentials prefer crypto.getRandomValues / crypto.randomInt.
 *
 * @param length - desired password length; values below 4 still yield the
 *   4 mandatory characters.
 * @returns the generated password string.
 */
export const generatePassword = (length: number): string => {
  const uppercase = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
  const lowercase = "abcdefghijklmnopqrstuvwxyz";
  const numbers = "0123456789";
  const symbols = "!@#$%&()_+-={}:,.<>?/"; // Safe symbol list
  const pick = (set: string) => set[Math.floor(Math.random() * set.length)];

  // Ensure the password contains at least one of each required type.
  const password: string[] = [
    pick(uppercase),
    pick(lowercase),
    pick(numbers),
    pick(symbols),
  ];

  // Fill the rest with random characters drawn from ALL sets (the previous
  // version only drew from letters, contradicting its own comment).
  const allCharacters = uppercase + lowercase + numbers + symbols;
  for (let i = password.length; i < length; i++) {
    password.push(pick(allCharacters));
  }

  // Shuffle so the mandatory characters aren't always at the front.
  return password.sort(() => Math.random() - 0.5).join("");
};

View File

@@ -0,0 +1,26 @@
import { queryOptions } from "@tanstack/react-query";
import axios from "axios";
/**
 * Query options for the admin user list. The auth token is read from
 * localStorage when the options are built; the query stays disabled until a
 * token exists.
 */
export function getUsers() {
  const token = localStorage.getItem("auth_token");
  const hasToken = Boolean(token);
  return queryOptions({
    queryKey: ["getUsers"],
    queryFn: () => fetchUsers(token),
    // Skip the request entirely while no token is stored.
    enabled: hasToken,
    staleTime: 1000,
    refetchOnWindowFocus: true,
  });
}
/**
 * GET /api/auth/allusers with a bearer token; resolves to the user array, or
 * an empty array when the payload carries no data.
 */
const fetchUsers = async (token: string | null) => {
  const response = await axios.get(`/api/auth/allusers`, {
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${token}`,
    },
  });
  return response.data.data ?? [];
};

View File

@@ -1,4 +1,4 @@
import {queryOptions} from "@tanstack/react-query"; import { queryOptions } from "@tanstack/react-query";
import axios from "axios"; import axios from "axios";
export function getlabels(hours: string) { export function getlabels(hours: string) {
@@ -7,13 +7,13 @@ export function getlabels(hours: string) {
queryFn: () => fetchSettings(hours), queryFn: () => fetchSettings(hours),
staleTime: 1000, staleTime: 1000,
//refetchInterval: 2500, refetchInterval: 2 * 2000,
refetchOnWindowFocus: true, refetchOnWindowFocus: true,
}); });
} }
const fetchSettings = async (hours: string) => { const fetchSettings = async (hours: string) => {
const {data} = await axios.get(`/api/v1/ocp/labels?hours=${hours}`); const { data } = await axios.get(`/api/ocp/getlabels?hours=${hours}`);
// if we are not localhost ignore the devDir setting. // if we are not localhost ignore the devDir setting.
//const url: string = window.location.host.split(":")[0]; //const url: string = window.location.host.split(":")[0];
return data.data ?? []; return data.data ?? [];

View File

@@ -1,4 +1,4 @@
import {queryOptions} from "@tanstack/react-query"; import { queryOptions } from "@tanstack/react-query";
import axios from "axios"; import axios from "axios";
export function getlots() { export function getlots() {
@@ -7,13 +7,13 @@ export function getlots() {
queryFn: () => fetchSettings(), queryFn: () => fetchSettings(),
staleTime: 10 * 1000, staleTime: 10 * 1000,
//refetchInterval: 10 * 1000, refetchInterval: 10 * 1000,
refetchOnWindowFocus: true, refetchOnWindowFocus: true,
}); });
} }
const fetchSettings = async () => { const fetchSettings = async () => {
const {data} = await axios.get("/api/v1/ocp/lots"); const { data } = await axios.get("/api/ocp/getlots");
// if we are not localhost ignore the devDir setting. // if we are not localhost ignore the devDir setting.
//const url: string = window.location.host.split(":")[0]; //const url: string = window.location.host.split(":")[0];
let lotData = data.data; let lotData = data.data;

View File

@@ -0,0 +1,22 @@
import { queryOptions } from "@tanstack/react-query";
import axios from "axios";
/**
 * Query options for OCP/RFID error+warn log entries over the last `hours`
 * hours, refetched every 2 seconds.
 *
 * @param hours - look-back window forwarded to the logger API.
 */
export function getOcpLogs(hours: string) {
  return queryOptions({
    // Include `hours` in the key so each window is cached separately and
    // changing the window triggers a refetch (it was previously omitted, so
    // stale data from a different window could be served from cache).
    queryKey: ["ocpLogs", hours],
    queryFn: () => fetchSettings(hours),
    staleTime: 1000,
    refetchInterval: 2 * 1000,
    refetchOnWindowFocus: true,
  });
}
/**
 * Fetches error/warn log entries for the ocp and rfid services from the
 * logger API, limited to the given look-back window; resolves to the entry
 * array, or an empty array when the payload carries no data.
 */
const fetchSettings = async (hours: string) => {
  const url = `/api/logger/logs?service=ocp&service=rfid&level=error&level=warn&hours=${hours}`;
  const response = await axios.get(url);
  return response.data.data ?? [];
};

865
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{ {
"name": "lstv2", "name": "lstv2",
"version": "2.9.0", "version": "2.11.0",
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "concurrently -n \"server,frontend\" -c \"#007755,#2f6da3\" \"npm run dev:server\" \"cd frontend && npm run dev\"", "dev": "concurrently -n \"server,frontend\" -c \"#007755,#2f6da3\" \"npm run dev:server\" \"cd frontend && npm run dev\"",
@@ -9,7 +9,7 @@
"dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts", "dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts",
"dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts", "dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts",
"build": "npm run build:server && npm run build:frontend", "build": "npm run build:server && npm run build:frontend",
"build:server": "rimraf dist && tsc --build && npm run copy:scripts", "build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y",
"build:frontend": "cd frontend && npm run build", "build:frontend": "cd frontend && npm run build",
"copy:scripts": "tsx server/scripts/copyScripts.ts", "copy:scripts": "tsx server/scripts/copyScripts.ts",
"copy:servers": "xcopy server\\services\\server\\utils\\serverData.json dist\\server\\services\\server\\utils /E /I /Y", "copy:servers": "xcopy server\\services\\server\\utils\\serverData.json dist\\server\\services\\server\\utils /E /I /Y",
@@ -21,10 +21,13 @@
"deploy": "standard-version --conventional-commits && npm run prodBuild", "deploy": "standard-version --conventional-commits && npm run prodBuild",
"zipServer": "dotenvx run -f .env -- tsx server/scripts/zipUpBuild.ts \"C:\\Users\\matthes01\\Documents\\lstv2\"", "zipServer": "dotenvx run -f .env -- tsx server/scripts/zipUpBuild.ts \"C:\\Users\\matthes01\\Documents\\lstv2\"",
"v1Build": "cd C:\\Users\\matthes01\\Documents\\logisticsSupportTool && npm run oldBuilder", "v1Build": "cd C:\\Users\\matthes01\\Documents\\logisticsSupportTool && npm run oldBuilder",
"prodBuild": "npm run v1Build && powershell -ExecutionPolicy Bypass -File server/scripts/build.ps1 -dir \"C:\\Users\\matthes01\\Documents\\lstv2\" && npm run zipServer", "scriptBuild": "powershell -ExecutionPolicy Bypass -File server/scripts/build.ps1 -dir \"C:\\Users\\matthes01\\Documents\\lstv2\"",
"removeOld": "rimraf dist && rimraf frontend/dist",
"prodBuild": "npm run v1Build && npm run build && npm run zipServer && npm run dev",
"commit": "cz", "commit": "cz",
"prodinstall": "npm i --omit=dev && npm run db:migrate", "prodinstall": "npm i --omit=dev && npm run db:migrate",
"checkupdates": "npx npm-check-updates" "checkupdates": "npx npm-check-updates",
"testingCode": "dotenvx run -f .env -- tsx watch database/testFiles/checkServerData.ts"
}, },
"config": { "config": {
"commitizen": { "commitizen": {
@@ -32,7 +35,7 @@
} }
}, },
"admConfig": { "admConfig": {
"build": 50, "build": 147,
"oldBuild": "backend-0.1.3.zip" "oldBuild": "backend-0.1.3.zip"
}, },
"devDependencies": { "devDependencies": {
@@ -42,12 +45,12 @@
"@types/js-cookie": "^3.0.6", "@types/js-cookie": "^3.0.6",
"@types/mssql": "^9.1.7", "@types/mssql": "^9.1.7",
"@types/node": "^22.13.11", "@types/node": "^22.13.11",
"@types/node-cron": "^3.0.11",
"@types/nodemailer": "^6.4.17",
"@types/pg": "^8.11.11", "@types/pg": "^8.11.11",
"@types/ws": "^8.18.0", "@types/ws": "^8.18.0",
"concurrently": "^9.1.2", "concurrently": "^9.1.2",
"cz-conventional-changelog": "^3.3.0", "cz-conventional-changelog": "^3.3.0",
"drizzle-kit": "^0.30.5",
"fs-extra": "^11.3.0",
"standard-version": "^9.5.0", "standard-version": "^9.5.0",
"tsx": "^4.19.3", "tsx": "^4.19.3",
"typescript": "^5.8.2" "typescript": "^5.8.2"
@@ -57,14 +60,19 @@
"@hono/node-server": "^1.14.0", "@hono/node-server": "^1.14.0",
"@hono/zod-openapi": "^0.19.2", "@hono/zod-openapi": "^0.19.2",
"@scalar/hono-api-reference": "^0.7.2", "@scalar/hono-api-reference": "^0.7.2",
"@tanstack/react-form": "^1.2.1",
"@types/jsonwebtoken": "^9.0.9", "@types/jsonwebtoken": "^9.0.9",
"@types/nodemailer-express-handlebars": "^4.0.5",
"adm-zip": "^0.5.16", "adm-zip": "^0.5.16",
"axios": "^1.8.4", "axios": "^1.8.4",
"bcryptjs": "^3.0.2", "bcryptjs": "^3.0.2",
"croner": "^9.0.0",
"date-fns": "^4.1.0", "date-fns": "^4.1.0",
"drizzle-kit": "^0.30.5",
"drizzle-orm": "^0.41.0", "drizzle-orm": "^0.41.0",
"drizzle-zod": "^0.7.0", "drizzle-zod": "^0.7.0",
"fast-xml-parser": "^5.0.9", "fast-xml-parser": "^5.0.9",
"fs-extra": "^11.3.0",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "^9.0.2",
"mssql": "^11.0.1", "mssql": "^11.0.1",
"nodemailer": "^6.10.0", "nodemailer": "^6.10.0",
@@ -75,6 +83,7 @@
"pino-pretty": "^13.0.0", "pino-pretty": "^13.0.0",
"postgres": "^3.4.5", "postgres": "^3.4.5",
"rimraf": "^6.0.1", "rimraf": "^6.0.1",
"st-ethernet-ip": "^2.7.3",
"ws": "^8.18.1", "ws": "^8.18.1",
"zod": "^3.24.2" "zod": "^3.24.2"
} }

View File

@@ -0,0 +1,69 @@
/**
 * Computes the freight class for a package from its weight (kg) and
 * dimensions (mm). Converts to pounds and inches, derives the density in
 * lb/ft^3 and maps it onto the density-based freight class table.
 *
 * @param weight - package weight in kilograms.
 * @param length - package length in millimetres.
 * @param width  - package width in millimetres.
 * @param height - package height in millimetres.
 * @returns the freight class (50..500; denser freight gets a lower class).
 */
export const freightClass = (
  weight: number,
  length: number,
  width: number,
  height: number
) => {
  const MM_PER_INCH = 25.4;
  const LB_PER_KG = 2.20462;
  const CUBIC_INCHES_PER_CUBIC_FOOT = 1728;

  const pounds = weight * LB_PER_KG;
  const cubicFeet =
    ((length / MM_PER_INCH) *
      (width / MM_PER_INCH) *
      (height / MM_PER_INCH)) /
    CUBIC_INCHES_PER_CUBIC_FOOT;
  const density = pounds / cubicFeet;

  // Density thresholds (lb/ft^3) paired with the class they map to, ordered
  // from densest to lightest; the first threshold met wins.
  const table: Array<[number, number]> = [
    [50, 50],
    [35, 55],
    [30, 60],
    [22.5, 65],
    [15, 70],
    [13.5, 77.5],
    [12, 85],
    [10.5, 92.5],
    [9, 100],
    [8, 110],
    [7, 125],
    [6, 150],
    [5, 175],
    [4, 200],
    [3, 250],
    [2, 300],
    [1, 400],
  ];
  const match = table.find(([minDensity]) => density >= minDensity);
  // Anything under 1 lb/ft^3 (or a non-numeric density) falls to class 500.
  return match ? match[1] : 500;
};

View File

@@ -1,12 +1,12 @@
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0"; process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
import { serve } from "@hono/node-server"; import { serve } from "@hono/node-server";
import { OpenAPIHono } from "@hono/zod-openapi"; import { OpenAPIHono } from "@hono/zod-openapi";
import { proxy } from "hono/proxy";
import { serveStatic } from "@hono/node-server/serve-static"; import { serveStatic } from "@hono/node-server/serve-static";
import { logger } from "hono/logger"; import { logger } from "hono/logger";
import { cors } from "hono/cors"; import { cors } from "hono/cors";
import { createLog } from "./services/logger/logger.js"; import { createLog } from "./services/logger/logger.js";
import { WebSocketServer } from "ws";
// custom routes // custom routes
import scalar from "./services/general/route/scalar.js"; import scalar from "./services/general/route/scalar.js";
import system from "./services/server/systemServer.js"; import system from "./services/server/systemServer.js";
@@ -21,7 +21,12 @@ import loggerService from "./services/logger/loggerService.js";
import ocpService from "./services/ocp/ocpService.js"; import ocpService from "./services/ocp/ocpService.js";
import { db } from "../database/dbclient.js"; import { db } from "../database/dbclient.js";
import { settings } from "../database/schema/settings.js"; import { settings } from "../database/schema/settings.js";
import { count } from "drizzle-orm"; import os from "os";
import { tryCatch } from "./globalUtils/tryCatch.js";
import { sendEmail } from "./services/notifications/controller/sendMail.js";
import notify from "./services/notifications/notifyService.js";
import eom from "./services/eom/eomService.js";
import dataMart from "./services/dataMart/dataMartService.js";
// create the main prodlogin here // create the main prodlogin here
const username = "lst_user"; const username = "lst_user";
@@ -29,9 +34,17 @@ const password = "Alpla$$Prod";
export const lstAuth = btoa(`${username}:${password}`); export const lstAuth = btoa(`${username}:${password}`);
// checking to make sure we have the settings intialized // checking to make sure we have the settings intialized
const serverIntialized = await db.select({ count: count() }).from(settings); const { data: settingsData, error: settingError } = await tryCatch(
db.select().from(settings)
);
if (settingError) {
throw Error("Error getting settings from the db. critical error.");
}
const serverIntialized: any = settingsData;
export const installed = export const installed =
serverIntialized[0].count === 0 && process.env.NODE_ENV !== "development" serverIntialized.length === 0 && process.env.NODE_ENV !== "development"
? false ? false
: true; : true;
createLog("info", "LST", "server", `Server is installed: ${installed}`); createLog("info", "LST", "server", `Server is installed: ${installed}`);
@@ -39,7 +52,10 @@ createLog("info", "LST", "server", `Server is installed: ${installed}`);
const app = new OpenAPIHono({ strict: false }); const app = new OpenAPIHono({ strict: false });
// middle ware // middle ware
app.use("*", logger()); if (process.env.NODE_ENV === "development") {
app.use("*", logger());
}
app.use( app.use(
"*", "*",
cors({ cors({
@@ -53,17 +69,17 @@ app.use(
); );
// Middleware to normalize route case // Middleware to normalize route case
app.use("*", async (c, next) => { // app.use("*", async (c, next) => {
const lowercasedUrl = c.req.url.toLowerCase(); // // const lowercasedUrl = c.req.url.toLowerCase();
//console.log("Incoming Request:", c.req.url, c.req.method); // console.log("Incoming Request:", c.req.url, c.req.method);
// If the URL is already lowercase, continue as usual // // // If the URL is already lowercase, continue as usual
if (c.req.url === lowercasedUrl) { // // if (c.req.url === lowercasedUrl) {
return next(); // await next();
} // // }
// Otherwise, re-route internally // // // Otherwise, re-route internally
return c.redirect(lowercasedUrl, 308); // 308 preserves the HTTP method // // return c.redirect(lowercasedUrl, 308); // 308 preserves the HTTP method
}); // });
app.doc("/api/ref", { app.doc("/api/ref", {
openapi: "3.0.0", openapi: "3.0.0",
@@ -85,6 +101,9 @@ const routes = [
printers, printers,
loggerService, loggerService,
ocpService, ocpService,
notify,
eom,
dataMart,
] as const; ] as const;
const appRoutes = routes.forEach((route) => { const appRoutes = routes.forEach((route) => {
@@ -143,7 +162,18 @@ process.on("SIGTERM", async () => {
process.on("uncaughtException", async (err) => { process.on("uncaughtException", async (err) => {
console.log("Uncaught Exception:", err); console.log("Uncaught Exception:", err);
//await closePool(); //await closePool();
process.exit(1); const emailData = {
email: "blake.matthes@alpla.com", // should be moved to the db so it can be reused.
subject: `${os.hostname()} has just encountered a crash.`,
template: "serverCrash",
context: {
error: err,
plant: `${os.hostname()}`,
},
};
await sendEmail(emailData);
//process.exit(1);
}); });
process.on("beforeExit", async () => { process.on("beforeExit", async () => {

View File

@@ -53,14 +53,12 @@ add in the below and change each setting area that says change me to something t
```env ```env
# PORTS # PORTS
PROD_PORT=4000
# To keep it all simple we will pass VITE to the ports that are used on both sides. # To keep it all simple we will pass VITE to the ports that are used on both sides.
VITE_SERVER_PORT=4400 VITE_SERVER_PORT=4000
# logLevel # logLevel
LOG_LEVEL=debug LOG_LEVEL=info
PROD_PORT=4000
# DUE to lstv1 we need 3000
SEC_PORT=3000
# Auth stuff # Auth stuff
SALTING=12 SALTING=12
SECRET=CHANGEME SECRET=CHANGEME
@@ -138,6 +136,36 @@ Next use the example command below to get the service up and running.
.\services.ps1 -serviceName "LSTV2" -option "install" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start" .\services.ps1 -serviceName "LSTV2" -option "install" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
``` ```
### Adding servers to the mix to update on from the front end
You will need to add your servers to `serverData.json`.
When the server starts up, it reads this file and makes changes as needed.
Below is an example of a server entry:
```JSON
{
"sName": "Kansas City",
"serverDNS": "usksc1vms006",
"plantToken": "usksc1",
"idAddress": "10.42.9.26",
"greatPlainsPlantCode": "85",
"streetAddress": "1800 E 94th St Suite 300",
"cityState": "Kansas City, MO",
"zipcode": "64131",
"contactEmail": "example@example.com",
"contactPhone": "555-555-5555",
"customerTiAcc": "ALPL01KCINT",
"lstServerPort": "4000",
"active": false,
"serverLoc": "E:\\LST\\lstv2",
"oldVersion": "E:\\LST\\lst_backend",
"shippingHours": "[{\"early\": \"06:30\", \"late\": \"23:00\"}]",
"tiPostTime": "[{\"from\": \"24\", \"to\": \"24\"}]",
"otherSettings": [{ "specialInstructions": "" }]
}
```
# Migrating From V1 to V2 # Migrating From V1 to V2
## User migration ## User migration

View File

@@ -4,14 +4,37 @@ param (
[string]$appPath, [string]$appPath,
[string]$command, # just the command like run startadm or what ever you have in npm. [string]$command, # just the command like run startadm or what ever you have in npm.
[string]$description [string]$description
[string]$remote
) )
# Example string to run with the parameters in it. # Example string to run with the parameters in it.
# .\services.ps1 -serviceName "LSTV2" -option "install" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start" # .\services.ps1 -serviceName "LSTV2" -option "install" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
### the fix
# .\services.ps1 -serviceName "LST-App" -option "delete" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
# .\services.ps1 -serviceName "LST-frontend" -option "delete" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
# .\services.ps1 -serviceName "LST-System" -option "delete" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
# .\services.ps1 -serviceName "LST-Gateway" -option "delete" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
# .\services.ps1 -serviceName "LST-App" -option "install" -appPath "E:\LST\lst_backend" -description "Logistics Support Tool V2" -command "run startapp"
# .\services.ps1 -serviceName "LST-frontend" -option "install" -appPath "E:\LST\lst_backend" -description "Logistics Support Tool V2" -command "run startfront"
$nssmPath = $AppPath + "\nssm.exe" $nssmPath = $AppPath + "\nssm.exe"
$npmPath = "C:\Program Files\nodejs\npm.cmd" # Path to npm.cmd $npmPath = "C:\Program Files\nodejs\npm.cmd" # Path to npm.cmd
if($remote -eq "true"){
$plantFunness = {
param ($service, $processType, $location)
# Call your PowerShell script inside plantFunness
& "$($location)\dist\server\scripts\services.ps1" -serviceName $service -option $processType -appPath $location
}
Invoke-Command -ComputerName $server -ScriptBlock $plantFunness -ArgumentList $service, $option, $appPath -Credential $credentials
}
if (-not ([Security.Principal.WindowsPrincipal] [Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")) { if (-not ([Security.Principal.WindowsPrincipal] [Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")) {
Write-Host "Error: This script must be run as Administrator." Write-Host "Error: This script must be run as Administrator."
exit 1 exit 1

View File

@@ -85,8 +85,23 @@ $plantFunness = {
$localPath = $location -replace '\$', ':' $localPath = $location -replace '\$', ':'
$serverFile = "$($localPath)\$buildFile" $serverFile = "$($localPath)\$buildFile"
$serverPath = "$($localPath)" $serverPath = "$($localPath)"
$appPath = $extractedFolderPath
$nssmPath = $serverPath + "\nssm.exe"
$npmPath = "C:\Program Files\nodejs\npm.cmd" # Path to npm.cmd
Write-Host "In the plant we go!!!!!" Write-Host "In the plant we go!!!!!"
######################################################################################
# Removing the dist and frontend folders to make sure we keep them the same and clean.
######################################################################################
# Delete the directories after extraction
Write-Host "Deleting Dist and Frontend..."
Set-Location $serverPath
npm run removeOld # --omit=dev
Write-Host "Unzipping the folder..." Write-Host "Unzipping the folder..."
$extractedFolderPath = $serverPath $extractedFolderPath = $serverPath
@@ -117,6 +132,17 @@ $plantFunness = {
exit 1 # Exit with a non-zero code if there's an error exit 1 # Exit with a non-zero code if there's an error
} }
# for iowa 2 need to change the port config on the start up of nextjs server
if($token -eq "usiow2"){
$jsonPkgloc = "$($obslst)\apps\frontend\package.json"
#read the file
$jsonContent = Get-Content -Path $jsonPkgloc | ConvertFrom-Json
#change the second we want to update
$jsonContent.scripts.start = "next start -p 3001"
# convert back to json
$jsonContent | ConvertTo-Json | Set-Content -Path $jsonPkgloc
}
############################################################################ ############################################################################
Write-Host "Stopping the services to do the updates, pkgs and db changes." Write-Host "Stopping the services to do the updates, pkgs and db changes."
@@ -162,10 +188,6 @@ $plantFunness = {
# Service removoal and making sure we have the new version added # Service removoal and making sure we have the new version added
################################################################# #################################################################
$appPath = $extractedFolderPath
$nssmPath = $serverPath + "\nssm.exe"
$npmPath = "C:\Program Files\nodejs\npm.cmd" # Path to npm.cmd
################################################################# #################################################################
# Removing all the old services # Removing all the old services
################################################################# #################################################################
@@ -199,7 +221,9 @@ $plantFunness = {
Set-Location $serverPath Set-Location $serverPath
npm run prodinstall # --omit=dev npm run prodinstall # --omit=dev
Write-Host "Finished doing updates" Write-Host "Finished doing updates"
Start-Sleep -Seconds 1 # Start-Sleep -Seconds 1
# Write-HOst "Running db migrations"
# npm run db:migrate
########################################################### ###########################################################
# Old system still active until we have everything off it # Old system still active until we have everything off it
@@ -336,14 +360,14 @@ try {
$gatewayport = "4400" $gatewayport = "4400"
$systemport = "4200" $systemport = "4200"
$ocmeport = "4300" $ocmeport = "4300"
$appport = "4900" $appport = "4400"
if ($token -eq "usiow2") { if ($token -eq "usiow2") {
$dbLink = "lstBackendDB_2" $dbLink = "lstBackendDB_2"
$gatewayport = "4401" $gatewayport = "4401"
$systemport = "4201" $systemport = "4201"
$ocmeport = "4301" $ocmeport = "4301"
$appport = "4901" $appport = "4401"
} }
if ($token -in @("test1", "test2", "test3")) { if ($token -in @("test1", "test2", "test3")) {
@@ -395,13 +419,13 @@ try {
########################################################### ###########################################################
# Starting the services back up. # Starting the services back up.
########################################################### ###########################################################
Write-Host "Starting the services" # Write-Host "Starting the services"
Write-Host "Starting $($serviceSystem)" # Write-Host "Starting $($serviceSystem)"
Start-Service -DisplayName $serviceSystem # Start-Service -DisplayName $serviceSystem
Start-Sleep -Seconds 1 # Start-Sleep -Seconds 1
Write-Host "Starting $($serviceGateway)" # Write-Host "Starting $($serviceGateway)"
Start-Service -DisplayName $serviceGateway # Start-Service -DisplayName $serviceGateway
Start-Sleep -Seconds 1 # Start-Sleep -Seconds 1
#Write-Host "Starting $($serviceAuth)" #Write-Host "Starting $($serviceAuth)"
#Start-Service -DisplayName $serviceAuth #Start-Service -DisplayName $serviceAuth
#Start-Sleep -Seconds 1 #Start-Sleep -Seconds 1
@@ -415,10 +439,10 @@ try {
Start-Service -DisplayName $serviceLstV2 Start-Service -DisplayName $serviceLstV2
Start-Sleep -Seconds 1 Start-Sleep -Seconds 1
Write-Host "$($server) finished updating" Write-Host "$($server) finished updating"
if($token -eq "usday1"){ # if($token -eq "usday1"){
Write-Host "Starting $($serviceOcme)" # Write-Host "Starting $($serviceOcme)"
Start-Service -DisplayName $serviceOcme # Start-Service -DisplayName $serviceOcme
} # }
} }
Invoke-Command -ComputerName $server -ScriptBlock $plantFunness -ArgumentList $server, $token, $location, $buildZip, $buildLoc, $obslst, $obsBuild -Credential $credentials Invoke-Command -ComputerName $server -ScriptBlock $plantFunness -ArgumentList $server, $token, $location, $buildZip, $buildLoc, $obslst, $obsBuild -Credential $credentials

View File

@@ -0,0 +1,22 @@
# Define the array of ALPLA application folders that need open permissions.
$folders = @(
    "AlplaBasis",
    "AlplaBudget",
    "AlplaINVOICE",
    "AlplaLabel",
    "AlplaOrder",
    "AlplaPlanning",
    "AlplaPurchase",
    "AlplaStock",
    "PDF24",
    "Module shortcuts"
)

# Grant Everyone full control, inherited by files and subfolders.
# (OI) = object inherit, (CI) = container inherit, F = full access.
$permissions = "Everyone:(OI)(CI)F"

# Apply the ACL to each folder recursively (/t), continue on per-file
# errors (/c), and suppress success chatter (/q). Skip folders that do
# not exist so icacls does not emit a hard error for them.
foreach ($folder in $folders) {
    $folderPath = "C:\Sources\AlplaPROD\$folder"
    if (Test-Path -Path $folderPath) {
        icacls $folderPath /grant $permissions /t /c /q
    }
    else {
        Write-Warning "Folder not found, skipping: $folderPath"
    }
}

View File

@@ -1,16 +1,19 @@
import {spawn} from "child_process"; import { spawn } from "child_process";
import {getAppInfo} from "../globalUtils/appInfo.js"; import { getAppInfo } from "../globalUtils/appInfo.js";
import {db} from "../../database/dbclient.js"; import { db } from "../../database/dbclient.js";
import {serverData} from "../../database/schema/serverData.js"; import { serverData } from "../../database/schema/serverData.js";
import {eq, sql} from "drizzle-orm"; import { eq, sql } from "drizzle-orm";
import {createLog} from "../services/logger/logger.js"; import { createLog } from "../services/logger/logger.js";
type UpdateServerResponse = { type UpdateServerResponse = {
success: boolean; success: boolean;
message: string; message: string;
}; };
export const updateServer = async (devApp: string, server: string | null): Promise<UpdateServerResponse> => { export const updateServer = async (
devApp: string,
server: string | null
): Promise<UpdateServerResponse> => {
const app = await getAppInfo(devApp); const app = await getAppInfo(devApp);
const serverInfo = await db const serverInfo = await db
.select() .select()
@@ -26,7 +29,8 @@ export const updateServer = async (devApp: string, server: string | null): Promi
); );
return { return {
success: false, success: false,
message: "Looks like you are missing the plant token or have entered an incorrect one please try again.", message:
"Looks like you are missing the plant token or have entered an incorrect one please try again.",
}; };
} }
@@ -78,7 +82,7 @@ export const updateServer = async (devApp: string, server: string | null): Promi
// change the server to upgradeing // change the server to upgradeing
await db await db
.update(serverData) .update(serverData)
.set({isUpgrading: true}) .set({ isUpgrading: true })
.where(eq(serverData.plantToken, server?.toLowerCase() ?? "")); .where(eq(serverData.plantToken, server?.toLowerCase() ?? ""));
//let stdout = ""; //let stdout = "";
//let stderr = ""; //let stderr = "";
@@ -109,8 +113,13 @@ export const updateServer = async (devApp: string, server: string | null): Promi
try { try {
await db await db
.update(serverData) .update(serverData)
.set({lastUpdated: sql`NOW()`, isUpgrading: false}) .set({ lastUpdated: sql`NOW()`, isUpgrading: false })
.where(eq(serverData.plantToken, server?.toLowerCase() ?? "")); .where(
eq(
serverData.plantToken,
server?.toLowerCase() ?? ""
)
);
createLog( createLog(
"info", "info",
"lst", "lst",
@@ -156,17 +165,35 @@ export const updateServer = async (devApp: string, server: string | null): Promi
export async function processAllServers(devApp: string) { export async function processAllServers(devApp: string) {
const servers = await db.select().from(serverData); const servers = await db.select().from(serverData);
createLog("info", "lst", "serverUpdater", `Running the update on all servers`); createLog(
"info",
"lst",
"serverUpdater",
`Running the update on all servers`
);
let count = 1; let count = 1;
for (const server of servers) { for (const server of servers) {
try { try {
const updateToServer = await updateServer(devApp, server.plantToken); const updateToServer = await updateServer(
createLog("info", "lst", "serverUpdater", `${server.sName} was updated.`); devApp,
server.plantToken
);
createLog(
"info",
"lst",
"serverUpdater",
`${server.sName} was updated.`
);
count = count + 1; count = count + 1;
//return {success: true, message: `${server.sName} was updated.`, data: updateToServer}; //return {success: true, message: `${server.sName} was updated.`, data: updateToServer};
} catch (error: any) { } catch (error: any) {
createLog("info", "lst", "serverUpdater", `Error updating ${server.sName}: ${error.message}`); createLog(
"info",
"lst",
"serverUpdater",
`Error updating ${server.sName}: ${error.message}`
);
//return {success: false, message: `Error updating ${server.sName}: ${error.message}`}; //return {success: false, message: `Error updating ${server.sName}: ${error.message}`};
} }
} }

View File

@@ -88,7 +88,11 @@ const updateBuildNumber = (appLock: string) => {
pkgJson.admConfig.build += 1; pkgJson.admConfig.build += 1;
// Write the updated data back // Write the updated data back
fs.writeFileSync(packagePath, JSON.stringify(pkgJson, null, 2), "utf8"); fs.writeFileSync(
packagePath,
JSON.stringify(pkgJson, null, 2),
"utf8"
);
createLog( createLog(
"info", "info",
@@ -99,7 +103,7 @@ const updateBuildNumber = (appLock: string) => {
// Auto-commit changes // Auto-commit changes
execSync("git add package.json"); execSync("git add package.json");
execSync( execSync(
`git commit -m "build: bump build number to ${pkgJson.admConfig.build}"` `git commit -m "chore(release): bump build number to ${pkgJson.admConfig.build}"`
); );
} else { } else {
createLog( createLog(
@@ -162,12 +166,17 @@ export const createZip = async (appLock: string) => {
`app Files (sorted by time):", ${JSON.stringify(appFiles)}` `app Files (sorted by time):", ${JSON.stringify(appFiles)}`
); );
if (appFiles.length > 5) { if (appFiles.length > 20) {
appFiles.slice(0, -5).forEach((file) => { appFiles.slice(0, -20).forEach((file) => {
const filePath = path.join(destPath, file.name); const filePath = path.join(destPath, file.name);
try { try {
fs.unlinkSync(filePath); fs.unlinkSync(filePath);
createLog("info", "lst", "zipUpBuild", `Deleted: ${file.name}`); createLog(
"info",
"lst",
"zipUpBuild",
`Deleted: ${file.name}`
);
} catch (error: any) { } catch (error: any) {
createLog( createLog(
"error", "error",

View File

@@ -1,14 +1,22 @@
import {eq} from "drizzle-orm"; import { eq } from "drizzle-orm";
import {db} from "../../../../database/dbclient.js"; import { db } from "../../../../database/dbclient.js";
import {users} from "../../../../database/schema/users.js"; import { users } from "../../../../database/schema/users.js";
import {createPassword} from "../utils/createPassword.js"; import { createPassword } from "../utils/createPassword.js";
import {setSysAdmin} from "./userRoles/setSysAdmin.js"; import { setSysAdmin } from "./userRoles/setSysAdmin.js";
import { createLog } from "../../logger/logger.js";
export const registerUser = async (username: string, password: string, email: string) => { export const registerUser = async (
username: string,
password: string,
email: string
) => {
const usercount = await db.select().from(users); const usercount = await db.select().from(users);
// make sure the user dose not already exist in the system // make sure the user dose not already exist in the system
const userCheck = await db.select().from(users).where(eq(users.username, username)); const userCheck = await db
.select()
.from(users)
.where(eq(users.username, username));
if (userCheck.length === 1) { if (userCheck.length === 1) {
return { return {
@@ -26,18 +34,26 @@ export const registerUser = async (username: string, password: string, email: st
try { try {
const user = await db const user = await db
.insert(users) .insert(users)
.values({username, email, password}) .values({ username, email, password })
.returning({user: users.username, email: users.email}); .returning({ user: users.username, email: users.email });
if (usercount.length <= 1) { if (usercount.length <= 1) {
console.log(`${username} is the first user and will be set to system admin.`); createLog(
const updateUser = await db.select().from(users).where(eq(users.username, username)); "info",
"auth",
"auth",
`${username} is the first user and will be set to system admin.`
);
const updateUser = await db
.select()
.from(users)
.where(eq(users.username, username));
setSysAdmin(updateUser, "systemAdmin"); setSysAdmin(updateUser, "systemAdmin");
} }
return {sucess: true, message: "User Registered", user}; return { sucess: true, message: "User Registered", user };
} catch (error) { } catch (error) {
console.log(error); createLog("error", "auth", "auth", `${error}`);
return { return {
success: false, success: false,
message: `${username} already exists please login or reset password, if you feel this is an error please contact your admin.`, message: `${username} already exists please login or reset password, if you feel this is an error please contact your admin.`,

View File

@@ -5,6 +5,7 @@ import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import type { User } from "../../../../types/users.js"; import type { User } from "../../../../types/users.js";
import { createPassword } from "../../utils/createPassword.js"; import { createPassword } from "../../utils/createPassword.js";
import { createLog } from "../../../logger/logger.js"; import { createLog } from "../../../logger/logger.js";
import { sendEmail } from "../../../notifications/controller/sendMail.js";
export const updateUserADM = async (userData: User) => { export const updateUserADM = async (userData: User) => {
/** /**
@@ -12,6 +13,7 @@ export const updateUserADM = async (userData: User) => {
* password, username, email. * password, username, email.
*/ */
console.log(userData);
createLog( createLog(
"info", "info",
"apiAuthedRoute", "apiAuthedRoute",
@@ -45,6 +47,7 @@ export const updateUserADM = async (userData: User) => {
username: userData.username ? userData.username : upd_user?.username, username: userData.username ? userData.username : upd_user?.username,
password: password, password: password,
email: userData.email ? userData.email : upd_user.email, email: userData.email ? userData.email : upd_user.email,
role: userData.role ? userData.role : upd_user.role,
}; };
// term ? ilike(posts.title, term) : undefined // term ? ilike(posts.title, term) : undefined
@@ -60,6 +63,19 @@ export const updateUserADM = async (userData: User) => {
}; };
} }
if (userData?.password!.length > 0) {
// send this user an email so they have the randomized password.
await sendEmail({
email: user[0]?.email,
subject: "LST - Password reset.",
template: "passwordReset",
context: {
password: userData.password!,
username: user[0].username!,
},
});
}
return { return {
success: true, success: true,
message: `${userData.username} has been updated.`, message: `${userData.username} has been updated.`,

View File

@@ -48,10 +48,19 @@ app.openapi(
//apiHit(c, { endpoint: "api/auth/setUserRoles" }); //apiHit(c, { endpoint: "api/auth/setUserRoles" });
const { username, module, role, override } = await c.req.json(); const { username, module, role, override } = await c.req.json();
try { try {
const access = await setUserAccess(username, module, role, override); const access = await setUserAccess(
username,
module,
role,
override
);
//return apiReturn(c, true, access?.message, access?.data, 200); //return apiReturn(c, true, access?.message, access?.data, 200);
return c.json( return c.json(
{ success: access.success, message: access.message, data: access.data }, {
success: access.success,
message: access.message,
data: access.data,
},
200 200
); );
} catch (error) { } catch (error) {

View File

@@ -29,13 +29,7 @@ const UserAccess = z.object({
.openapi({ example: "smith@example.com" }), .openapi({ example: "smith@example.com" }),
password: z password: z
.string() .string()
.min(6, { message: "Passwords must be longer than 3 characters" })
.regex(/[A-Z]/, {
message: "Password must contain at least one uppercase letter",
})
.regex(/[\W_]/, {
message: "Password must contain at least one special character",
})
.optional() .optional()
.openapi({ example: "Password1!" }), .openapi({ example: "Password1!" }),
}); });
@@ -44,7 +38,7 @@ app.openapi(
createRoute({ createRoute({
tags: ["Auth:admin"], tags: ["Auth:admin"],
summary: "updates a specific user", summary: "updates a specific user",
method: "post", method: "patch",
path: "/updateuser", path: "/updateuser",
middleware: [ middleware: [
authMiddleware, authMiddleware,

View File

@@ -0,0 +1,13 @@
import { query } from "../../sqlServer/prodSqlServer.js";
import { activeArticle } from "../../sqlServer/querys/dataMart/article.js";

/**
 * Runs the data-mart "active articles" query against the production
 * SQL Server.
 *
 * NOTE(review): on failure the caught error object itself is returned as
 * the payload (the route embeds whatever this returns in the response
 * `data` field), so callers cannot distinguish rows from an error by
 * type alone. Preserved as-is; confirm callers expect this.
 *
 * @returns the query result rows, or the caught error on failure.
 */
export const getActiveAv = async (): Promise<unknown> => {
  let articles: unknown = [];
  try {
    articles = await query(activeArticle, "Get active articles");
  } catch (error) {
    // Best-effort: surface the error as the returned data.
    articles = error;
  }
  return articles;
};

View File

@@ -0,0 +1,23 @@
import { query } from "../../sqlServer/prodSqlServer.js";
import {
  totalInvNoRn,
  totalInvRn,
} from "../../sqlServer/querys/dataMart/totalINV.js";

/**
 * Returns the current total inventory from the production SQL Server.
 *
 * Uses the "no running numbers" variant of the query (totalInvNoRn);
 * totalInvRn is imported but not used yet — presumably for a future
 * criteria toggle (TODO confirm).
 *
 * @returns an envelope { success, message, data } where `data` holds the
 *          inventory rows on success, or the caught error on failure.
 */
export const getINV = async () => {
  try {
    // Run the no-running-numbers query directly; the previous
    // `updatedQuery` alias added nothing.
    const inventory: unknown = await query(totalInvNoRn, "Gets Curruent inv");
    return { success: true, message: "Current inv", data: inventory };
  } catch (error) {
    // Log locally; the envelope below carries the error to the caller.
    console.log(error);
    return {
      success: false,
      message: "There was an error getting the inventory",
      data: error,
    };
  }
};

View File

@@ -0,0 +1,14 @@
import { OpenAPIHono } from "@hono/zod-openapi";
import activequerys from "./route/getCurrentQuerys.js";
import getArticles from "./route/getActiveArticles.js";
import currentInv from "./route/getInventory.js";

/** Data-mart router: mounts every data-mart sub-route under /datamart. */
const app = new OpenAPIHono();

const routes = [activequerys, getArticles, currentInv] as const;

// forEach returns void, so its result must not be captured in a const.
for (const route of routes) {
  app.route("/datamart", route);
}

export default app;

View File

@@ -0,0 +1,47 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { getActiveAv } from "../controller/getActiveArticles.js";

const app = new OpenAPIHono({ strict: false });

/**
 * GET /getarticles — returns all currently active articles from the
 * data mart. Takes no request body or params; api-hit tracking is not
 * wired up yet (see commented call below).
 */
app.openapi(
  createRoute({
    tags: ["dataMart"],
    summary: "Returns all the Active articles.",
    method: "get",
    path: "/getarticles",
    responses: responses(),
  }),
  async (c) => {
    //apiHit(c, { endpoint: `api/datamart/getarticles` });
    try {
      return c.json(
        {
          success: true,
          message: "Current active Articles",
          data: await getActiveAv(),
        },
        200
      );
    } catch (error) {
      return c.json(
        {
          success: false,
          // Fixed: previous message was copy-pasted from the eom stats route.
          message: "There was an error getting the active articles.",
          data: error,
        },
        400
      );
    }
  }
);

export default app;

View File

@@ -0,0 +1,102 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../globalUtils/routeDefs/responses.js";

const app = new OpenAPIHono({ strict: false });

/** Shape of one entry in the catalogue of available data-mart queries. */
type AvailableQuery = {
  name: string;
  endpoint: string;
  description: string;
  // Comma-separated list of criteria the endpoint accepts, when any.
  criteria?: string;
};

// Catalogue of the currently exposed data-mart queries. The commented
// entries are v1 endpoints presumably pending migration — confirm
// before deleting.
const current: AvailableQuery[] = [
  {
    name: "getActiveAv",
    endpoint: "/api/datamart/getarticles",
    description: "Gets all current active AV, with specific critiera.",
  },
  // {
  //   name: "getStockLaneDims",
  //   endpoint: "/api/v1/masterData/getStockDims",
  //   description: "Returns the lane dims along with a column to send actaul dims to be updated.",
  // },
  // {
  //   name: "getAddressInfo",
  //   endpoint: "/api/v1/masterData/getAddressInfo",
  //   description: "Returns current active addresses with street and zip",
  // },
  // {
  //   name: "getMissingPkgData",
  //   endpoint: "/api/v1/masterData/getMissingPKGData",
  //   description: "Returns all packaging data that is missing either printer, layout, or carton layout",
  // },
  // {
  //   name: "getCustomerInventory",
  //   endpoint: "/api/v1/masterData/getCustomerInventory",
  //   description: "Returns specific customer inventory based on there address ID.",
  //   criteria: "customer",
  // },
  // {
  //   name: "getPalletLabels",
  //   endpoint: "/api/v1/masterData/getPalletLabels",
  //   description: "Returns specific amount of pallets RN, Needs label number and printer, Specfic to Dayton.",
  //   criteria: "runningNumber,printerName,count",
  // },
  // {
  //   name: "getOpenOrders",
  //   endpoint: "/api/v1/masterData/getOpenOrders",
  //   description:
  //     "Returns open orders based on day count sent over, sDay 15 days in the past eDay 5 days in the future, can be left empty for this default days",
  //   criteria: "sDay,eDay",
  // },
  // {
  //   name: "getOpenIncoming",
  //   endpoint: "/api/v1/masterData/getOpenIncoming",
  //   description:
  //     "Returns open orders based on day count sent over, sDay 15 days in the past eDay 5 days in the future, can be left empty for this default days",
  //   criteria: "sDay,eDay",
  // },
  // {
  //   name: "planningCheckPkg",
  //   endpoint: "/api/v1/masterData/planningPkgCheck",
  //   description: "Returns all lots starting later than today and has a pkg that is missing layouts.",
  // },
  {
    name: "getinventory",
    endpoint: "/api/datamart/getinventory",
    // description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
    description:
      "Returns all inventory, excludes inv locations. no running numbers",
    //criteria: "includeRunnningNumbers", // uncomment this out once the improt process can be faster
  },
  // {
  //   name: "getOpenOrderUpdates",
  //   endpoint: "/api/v1/masterData/getOpenOrderUpdates",
  //   // description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
  //   description: "Returns all orders based on customer id, leaving empty will pull everythinng in.",
  //   criteria: "customer", // uncomment this out once the improt process can be faster
  // },
  // {
  //   name: "getSiloAdjustment",
  //   endpoint: "/api/v1/warehouse/getSiloAdjustment",
  //   // description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
  //   description: "Returns all siloadjustments in selected date range IE: 1/1/2025 to 1/31/2025",
  //   criteria: "startDate,endDate", // uncomment this out once the improt process can be faster
  // },
];

/**
 * GET /getavalibleaquerys — returns the static catalogue above so
 * clients can discover which data-mart queries are currently exposed.
 */
app.openapi(
  createRoute({
    tags: ["dataMart"],
    summary: "Returns all avalible querys.",
    method: "get",
    path: "/getavalibleaquerys",
    responses: responses(),
  }),
  async (c) => {
    //const body = await c.req.json();
    // make sure we have a vaid user being accessed thats really logged in
    //apiHit(c, { endpoint: `api/logger/logs/id` });
    return c.json({
      success: true,
      message: "All Current Active Querys.",
      data: current,
    });
  }
);

export default app;

View File

@@ -0,0 +1,54 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { getINV } from "../controller/getinventory.js";

const app = new OpenAPIHono({ strict: false });

// GET /getinventory — returns all current inventory from the data mart.
// Takes no request body; the commented request block is kept for a
// future criteria payload (TODO confirm before removing).
app.openapi(
  createRoute({
    tags: ["dataMart"],
    summary: "Returns All current inventory.",
    method: "get",
    path: "/getinventory",
    // request: {
    //   body: {
    //     content: {
    //       "application/json": { schema: Body },
    //     },
    //   },
    // },
    responses: responses(),
  }),
  async (c) => {
    // const { data: body, error } = await c.req.json();
    // if (error) {
    //   return c.json({
    //     success: false,
    //     message: "Missing data please try again.",
    //   });
    // }
    // make sure we have a vaid user being accessed thats really logged in
    //apiHit(c, { endpoint: `api/logger/logs/id` });
    // tryCatch only traps a throw from getINV; a { success: false }
    // envelope from getINV is still returned below with HTTP 200.
    const { data, error } = await tryCatch(getINV());
    if (error) {
      return c.json(
        {
          success: false,
          message: "There was an error getting the inv.",
          data: error,
        },
        400
      );
    }
    // Re-wrap the controller's envelope so the route controls the shape.
    return c.json({
      success: data.success,
      message: data.message,
      data: data.data,
    });
  }
);

export default app;

View File

@@ -0,0 +1,139 @@
// import cron from "node-cron";
// import {runQuery, prisma, totalInvNoRn, activeArticle, getShiftTime, historicalInv} from "database";
// import {createLog} from "logging";
// import {deleteHistory} from "./deleteHistory.js";
// export const historyInv = async (date) => {
// //console.log(date);
// if (!date) {
// return `Missing Data`;
// }
// // date should be sent over as a string IE: 2024-01-01
// let inv = [];
// try {
// inv = await prisma.historyInventory.findMany({where: {histDate: date}});
// console.log(inv.length);
// // if the date returns nothing we need to pull the historical data
// if (inv.length === 0) {
// const result = await prisma.settings.findFirst({where: {name: "plantToken"}});
// try {
// const plantUpdate = historicalInv.replaceAll("test1", result.value);
// const queryDate = plantUpdate.replaceAll("[date]", date);
// inv = await runQuery(queryDate, "Get histical inv");
// return inv;
// } catch (error) {
// createLog("general/eom", "error", "There was an error getting the historical inv.");
// return error;
// }
// } else {
// return inv;
// }
// //return inv;
// } catch (error) {
// console.log(error);
// return error;
// }
// };
// // start the cron job for getting the hostrical inv based on the plants shift time
// export const startCronHist = () => {
// let shiftTime = ["06", "00", "00"];
// const startProcess = async () => {
// let inv = [];
// let articles = [];
// let plantToken = "test1";
// const date = new Date();
// const dateString = date.toISOString().split("T")[0];
// date.setDate(date.getDate() - 30);
// const oldDate = date.toISOString().split("T")[0];
// // checking if even need to run this
// // before adding more make sure we dont already have data
// const checkInv = await prisma.historyInventory.findFirst({where: {histDate: dateString}});
// if (checkInv) {
// createLog(
// "general/eom",
// "warn",
// `There seems to already be inventory added for ${dateString}, no new data will be added`
// );
// return;
// }
// // get plant token
// try {
// const result = await prisma.settings.findFirst({where: {name: "plantToken"}});
// plantToken = result.value;
// } catch (error) {
// createLog("general/eom", "error", "failed to get planttoken");
// }
// //get shift time
// try {
// const result = await runQuery(getShiftTime.replaceAll("test1", plantToken), "GettingShift time");
// shiftTime = result[0].shiftStartTime.split(":");
// } catch (error) {
// createLog("general/eom", "error", `Error running getShift Query: ${error}`);
// }
// // get inventory
// try {
// const result = await runQuery(totalInvNoRn.replaceAll("test1", plantToken), "getting inventory");
// inv = result;
// } catch (error) {
// createLog("general/eom", "error", `Error running get inventory Query: ${error}`);
// }
// // get active articles
// try {
// const result = await runQuery(activeArticle.replaceAll("test1", plantToken), "Get active articles");
// articles = result;
// } catch (error) {
// createLog("general/eom", "error", `Error running get article: ${error}`);
// }
// //add the inventory to the historical table
// try {
// let hist = Object.entries(inv).map(([key, value]) => {
// // remove the values we dont want in the historical view
// const {total_Pallets, avalible_Pallets, coa_Pallets, held_Pallets, ...histData} = value;
// // get av tyep
// const avType = articles.filter((a) => (a.IdArtikelvarianten = inv[key].av))[0].TypeOfMaterial;
// // add in the new fields
// const hist = {
// ...histData,
// histDate: dateString, //new Date(Date.now()).toISOString().split("T")[0],
// avType,
// };
// return hist;
// });
// try {
// const addHistData = await prisma.historyInventory.createMany({data: hist});
// createLog(
// "general/eom",
// "info",
// `${addHistData.count} were just added to the historical inventory for date ${dateString}`
// );
// } catch (error) {
// createLog("general/eom", "error", `Adding new historical inventory error: ${error}`);
// }
// // delete the older inventory
// deleteHistory(oldDate);
// } catch (error) {
// createLog("general/eom", "error", `Adding new historical inventory error: ${error}`);
// }
// };
// // actaully run the process once after restaart just to make sure we have inventory
// startProcess();
// // setup the cron stuff
// const startHour = shiftTime[0];
// const startMin = shiftTime[1];
// createLog("general/eom", "info", `Historical Data will run at ${shiftTime[0]}:${shiftTime[1]} daily`);
// cron.schedule(`${startMin} ${startHour} * * *`, () => {
// createLog("general/eom", "info", "Running historical invnetory.");
// startProcess();
// });
// };

View File

@@ -0,0 +1,15 @@
// import {prisma} from "database";
// import {createLog} from "logging";
// export const deleteHistory = async (date: string) => {
// // delete the inventory if it equals this date
// try {
// const remove = await prisma.$executeRaw`
// DELETE FROM historyInventory
// WHERE histDate < ${date}
// `;
// createLog("general/eom", "info", `${remove} were just remove from the historical inventory for date: ${date}`);
// } catch (error) {
// createLog("general/eom", "error", `Removing historical inventory error: ${error}`);
// }
// };

View File

@@ -1,5 +1,13 @@
import {OpenAPIHono} from "@hono/zod-openapi"; import { OpenAPIHono } from "@hono/zod-openapi";
const app = new OpenAPIHono(); const app = new OpenAPIHono();
import stats from "./route/stats.js";
import history from "./route/invHistory.js";
const routes = [stats, history] as const;
const appRoutes = routes.forEach((route) => {
app.route("/eom", route);
});
export default app; export default app;

View File

@@ -0,0 +1,41 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";

const app = new OpenAPIHono({ strict: false });

// NOTE(review): this schema looks copy-pasted from the eom stats route;
// it is registered as `params` (path params) on a POST whose path has no
// placeholders — verify whether it should be a request body instead.
const EomStat = z.object({
  plant: z.string().openapi({ example: "Salt Lake City" }),
  userRan: z.string().openapi({ example: "smith034" }),
  eomSheetVersion: z.string().openapi({ example: "0.0.223" }),
});

// POST /histinv — stub: always returns an empty success payload; the
// historical-inventory lookup is not implemented yet.
app.openapi(
  createRoute({
    tags: ["eom"],
    summary: "Gets the correct eom history.",
    method: "post",
    path: "/histinv",
    request: {
      params: EomStat,
    },
    responses: responses(),
  }),
  async (c) => {
    //const body = await c.req.json();
    // make sure we have a vaid user being accessed thats really logged in
    //apiHit(c, { endpoint: `api/logger/logs/id` });
    try {
      return c.json({ success: true, message: "", data: [] }, 200);
    } catch (error) {
      return c.json(
        {
          success: false,
          // NOTE(review): message mentions "posting the eom stat" but this
          // route fetches history — looks copy-pasted, confirm and fix.
          message: "There was an error posting the eom stat.",
          data: error,
        },
        400
      );
    }
  }
);

export default app;

View File

@@ -0,0 +1,41 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";

const app = new OpenAPIHono({ strict: false });

/** Expected shape of an end-of-month stats submission. */
const EomStat = z.object({
  plant: z.string().openapi({ example: "Salt Lake City" }),
  userRan: z.string().openapi({ example: "smith034" }),
  eomSheetVersion: z.string().openapi({ example: "0.0.223" }),
});

// POST /stats — records usage stats for the EOM sheet. Currently a stub:
// the hit is tracked but nothing is persisted yet.
app.openapi(
  createRoute({
    tags: ["eom"],
    summary: "Adds in the stats for the eom.",
    method: "post",
    path: "/stats",
    request: {
      params: EomStat,
    },
    responses: responses(),
  }),
  async (c) => {
    //const body = await c.req.json();
    // make sure we have a vaid user being accessed thats really logged in
    // Fixed: hit label was `api/logger/logs/id`, copy-pasted from the
    // logger route; use this route's own endpoint so metrics are correct.
    apiHit(c, { endpoint: `api/eom/stats` });
    try {
      return c.json({ success: true, message: "", data: [] }, 200);
    } catch (error) {
      return c.json(
        {
          success: false,
          message: "There was an error posting the eom stat.",
          data: error,
        },
        400
      );
    }
  }
);

export default app;

View File

@@ -1,7 +0,0 @@
import {createLog} from "../../logger/logger.js";
export const sendEmail = async () => {
createLog("info", "lst", "general", "Preparing to send an email");
// settings
};

View File

@@ -1,28 +1,41 @@
import {and, eq, inArray, lte, sql} from "drizzle-orm"; import { and, eq, gte, inArray, lte, sql } from "drizzle-orm";
import {db} from "../../../../database/dbclient.js"; import { db } from "../../../../database/dbclient.js";
import {logs} from "../../../../database/schema/logs.js"; import { logs } from "../../../../database/schema/logs.js";
import {createLog} from "../logger.js"; import { createLog } from "../logger.js";
export const getLogs = async (data: any) => { export const getLogs = async (data: any) => {
try { try {
// clear all remaining logs ne to info. // clear all remaining logs ne to info.
const checked = data.checked && data.checked[0] === "true" ? true : false || false; const checked =
data.checked && data.checked[0] === "true" ? true : false || false;
const logData = await db const logData = await db
.select() .select()
.from(logs) .from(logs)
.where( .where(
and( and(
lte(logs.created_at, sql.raw(`NOW() - INTERVAL '${data.hours} hours'`)), gte(
logs.created_at,
sql.raw(`NOW() - INTERVAL '${data.hours ?? "4"} hours'`)
),
inArray(logs.service, data.service), inArray(logs.service, data.service),
inArray(logs.level, data.level), inArray(logs.level, data.level),
eq(logs.checked, checked) eq(logs.checked, checked)
) )
); );
return {success: true, message: "logs returned", data: logData}; return { success: true, message: "logs returned", data: logData };
} catch (error) { } catch (error) {
console.log(error); console.log(error);
createLog("error", "lst", "logger", `There was an error deleteing server logs. ${error}`); createLog(
return {success: false, message: "An error occured while trying to get the logs", error}; "error",
"lst",
"logger",
`There was an error deleteing server logs. ${error}`
);
return {
success: false,
message: "An error occured while trying to get the logs",
error,
};
} }
}; };

View File

@@ -1,6 +1,6 @@
import {pino, type LogFn, type Logger} from "pino"; import { pino, type LogFn, type Logger } from "pino";
export let logLevel = "info"; export let logLevel = process.env.LOGLEVEL || "info";
const transport = pino.transport({ const transport = pino.transport({
targets: [ targets: [
@@ -31,7 +31,7 @@ const log: Logger = pino(
    // },
    //customLevels: {death: 70},
    // removes data from the logs that we dont want to be shown :D
    redact: { paths: ["email", "password"], remove: true },
  },
  transport
);
@@ -43,8 +43,11 @@ export const createLog = (
message: string message: string
) => { ) => {
  if (level in log) {
    log[level]({ username, service }, message);
  } else {
    log.warn(
      { username, service },
      `Invalid log level '${level}', falling back to warn: ${message}`
    );
  }
};

View File

@@ -0,0 +1,102 @@
import { ConsoleLogWriter } from "drizzle-orm";
import { prodEndpointCreation } from "../../../globalUtils/createUrl.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { labelData } from "../../sqlServer/querys/materialHelpers/labelInfo.js";
import axios from "axios";
import { laneInfo } from "../../sqlServer/querys/materialHelpers/laneInfo.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
type Data = {
runningNr: string;
laneName: string;
};
export const returnMaterial = async (data: Data, prod: any) => {
const { runningNr, laneName } = data;
// replace the rn
const rnReplace = labelData.replaceAll("[rn]", runningNr);
// get the lane id by name
const laneQuery = laneInfo.replaceAll("[laneName]", laneName);
let barcode;
// get the barcode from the running number
try {
barcode = await query(rnReplace, "labelData");
} catch (error) {
console.log(error);
createLog(
"error",
prod.user.username,
"logistics",
`Error getting barcode: ${error}`
);
}
const { data: laneData, error: laneError } = await tryCatch(
query(laneQuery, "laneInfo")
);
if (laneError) {
return {
success: false,
message:
"The lane you entered is either deactivated or dose not exist.",
laneError,
};
}
if (!laneData) {
return {
success: false,
message:
"The lane you entered is either deactivated or dose not exist.",
};
}
if (laneData.length === 0) {
return {
success: false,
message:
"The lane you entered is either deactivated or dose not exist.",
};
}
if (barcode.length === 0) {
return {
success: false,
message: "The running number you've is not in stock.",
};
//throw Error("The provided runningNr is not in stock");
}
// create the url to post
const url = await prodEndpointCreation(
"/public/v1.0/IssueMaterial/ReturnPartiallyConsumedManualMaterial"
);
const returnSomething = {
laneId: laneData[0]?.laneID,
barcode: barcode[0]?.barcode,
};
try {
const results = await axios.post(url, returnSomething, {
headers: {
"Content-Type": "application/json",
Authorization: `Basic ${prod.user.prod}`,
},
});
//console.log(results);
return {
success: true,
message: "Material was returned",
status: results.status,
};
} catch (error: any) {
return {
success: false,
status: 200,
message: error.response?.data.errors[0].message,
};
}
};

View File

@@ -1,9 +1,10 @@
import { OpenAPIHono } from "@hono/zod-openapi";
import comsumeMaterial from "./route/consumeMaterial.js";
import returnMat from "./route/returnMaterial.js";

const app = new OpenAPIHono();
const routes = [comsumeMaterial, returnMat] as const;
// app.route("/server", modules);
const appRoutes = routes.forEach((route) => {

View File

@@ -0,0 +1,70 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { verify } from "hono/jwt";
import { returnMaterial } from "../controller/returnMaterial.js";
const app = new OpenAPIHono();
const responseSchema = z.object({
success: z.boolean().optional().openapi({ example: true }),
message: z.string().optional().openapi({ example: "user access" }),
});
app.openapi(
createRoute({
tags: ["logistics"],
summary: "Retrns material based on its running number and laneName",
method: "post",
path: "/return",
middleware: authMiddleware,
description:
"Provided a running number and Lane to return the material.",
responses: {
200: {
content: { "application/json": { schema: responseSchema } },
description: "stopped",
},
400: {
content: { "application/json": { schema: responseSchema } },
description: "Failed to stop",
},
401: {
content: { "application/json": { schema: responseSchema } },
description: "Failed to stop",
},
},
}),
async (c) => {
apiHit(c, { endpoint: "api/sqlProd/close" });
const authHeader = c.req.header("Authorization");
const token = authHeader?.split("Bearer ")[1] || "";
try {
const payload = await verify(token, process.env.JWT_SECRET!);
try {
//return apiReturn(c, true, access?.message, access?.data, 200);
const data = await c.req.json();
const consume = await returnMaterial(data, payload);
return c.json(
{ success: consume?.success, message: consume?.message },
200
);
} catch (error) {
//console.log(error);
//return apiReturn(c, false, "Error in setting the user access", error, 400);
return c.json(
{
success: false,
message: "Missing data please try again",
error,
},
400
);
}
} catch (error) {
return c.json({ success: false, message: "Unauthorized" }, 401);
}
}
);
export default app;

View File

@@ -0,0 +1,143 @@
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { sendEmail } from "../sendMail.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
export interface DownTime {
downTimeId?: number;
machineAlias?: string;
}
export default async function reprintLabelMonitor(notifyData: any) {
// we will over ride this with users that want to sub to this
// a new table will be called subalerts and link to the do a kinda linkn where the user wants it then it dose subId: 1, userID: x, notificationId: y. then in here we look up the userid to get the email :D
// this could then leave the emails in the notificaion blank and let users sub to it.
if (notifyData.emails === "") {
createLog(
"error",
"notify",
"notify",
`There are no emails set for ${notifyData.name}`
);
return;
}
// console.log(data.secondarySetting[0].duration);
let dQuery = `
SELECT
[IdHistoryStillstandsereignis] as downTimeId
,DATEDIFF(MINUTE,b.[Startzeit], b.[Endzeit]) as totalDuration
--, b.[IdMaschine]
,x.[Bezeichnung] as machineAlias
--,b.[IdStillstandsGrund],
, c.CTO_Code
,c.Downtime_Description
--,b.[IdFehlermerkmal],
,case when g.DT_Group_Desc is null then 'Not assigned yet' else g.DT_Group_Desc end as groupDesc
,b.[Bemerkung] as remark
,CONVERT(VARCHAR, CAST(b.[Startzeit] AS DATETIME), 100) dtStart
,CONVERT(VARCHAR, CAST(b.[Endzeit] AS DATETIME), 100) dtEnd
FROM Alplaprod_test1.[dbo].[T_HistoryStillstandsereignis] (nolock)b
--get the machine info
left join
Alplaprod_test1.[dbo].[T_Maschine] (nolock)x
on b.IdMaschine = x.IdMaschine
-- add in the cto codes
left join
Alplaprod_test1.[dbo].[V_MES_Downtime_Reasons] (nolock)c
on b.IdStillstandsGrund = c.Local_Downtime_ID
left join
Alplaprod_test1.[dbo].[V_MES_Downtime_Characteristics] (nolock)g
on b.IdFehlermerkmal = g.Local_DT_Characteristic_Id
where DATEDIFF(MINUTE,b.[Startzeit],b.[Endzeit]) > ${
notifyData.notifiySettings
? notifyData.notifiySettings?.duration
: 10
}
and b.[Startzeit] > getDate() - ${
notifyData.notifiySettings
? notifyData.notifiySettings?.daysInPast
: 10
} --adding this date check in so we dont get everything possible
and c.CTO_Code not like 'a%'
and c.CTO_Code not like 'b%'
and c.CTO_Code not like 'c%'
and c.CTO_Code not like 'd%'
and c.CTO_Code not like 'e%'
and c.CTO_Code not like 'f%'
and c.CTO_Code not like 'y%'
order by IdHistoryStillstandsereignis desc
`;
//console.log(query);
let downTime: any; //DownTime[];
try {
downTime = await query(dQuery, "downTimeCheck");
//console.log(labels.length);
if (
downTime.length > 0 &&
downTime[0]?.downTimeId > notifyData.notifiySettings.prodID
) {
//send the email :D
const emailSetup = {
emailTo: notifyData.emails,
subject: `Alert! Downtime recorded greater than ${
notifyData.notifiySettings?.duration
}min ${
downTime.length === 1
? `on ${downTime[0].machineAlias}`
: ""
}`,
template: "downTimeCheck",
context: {
items: downTime,
secondarySetting: notifyData.notifiySettings,
},
};
const sentEmail = await sendEmail(emailSetup);
if (!sentEmail.success) {
createLog(
"error",
"notify",
"notify",
"Failed to send email, will try again on next interval"
);
return;
}
const { data, error } = await tryCatch(
db
.update(notifications)
.set({
lastRan: sql`NOW()`,
notifiySettings: {
...notifyData.notifiySettings,
prodID: downTime[0].downTimeId,
},
})
.where(eq(notifications.name, notifyData.name))
);
}
} catch (err) {
createLog(
"error",
"notify",
"notify",
`Error from running the downtimeCheck query: ${err}`
);
}
}

View File

@@ -0,0 +1,27 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
const notification = async (notifyData: any) => {
/**
* Pass the entire notification over
*/
createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
const { data, error } = await tryCatch(
db
.update(notifications)
.set({
lastRan: sql`NOW()`,
// notifiySettings: {
// ...updateSettings,
// prodID: labels[0].IdEtikettenHistorie,
// },
})
.where(eq(notifications.name, notifyData.name))
);
};
export default notification;

View File

@@ -0,0 +1,133 @@
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
import { isWeekend } from "date-fns";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sendEmail } from "../sendMail.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { eq, sql } from "drizzle-orm";
export interface PPOO {
IdPosition?: number;
}
export default async function reprintLabelMonitor(notifyData: any) {
createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
if (notifyData.emails === "") {
createLog(
"error",
"notify",
"notify",
`There are no emails set for ${notifyData.name}`
);
return;
}
// parse the secondarySetting back to json to use it.
// notifyData = { ...notifyData, secondarySetting: JSON.parse(notifyData.secondarySetting) };
// as this one goes to managers we want to not send on the weekends
const weekend = isWeekend(new Date(Date.now()));
if (weekend && notifyData.notifiySettings.weekend) {
createLog(
"info",
"notify",
"notify",
`${notifyData.name} will not run on the weekends`
);
return;
}
let notifyQuery = `
SELECT
--[EinlagerungsDatummin] as lastMovingDate,
round(VerfuegbareMengeVPKSum,2) as pallets
,VerfuegbareMengeSum as total
,round([GesperrteMengeVpkSum],2) as held
,round([GesperrteMengeSum],2) as heldQty
,[IdArtikelVarianten] as av
,[IdProdBereich] as pfcID
,[ArtikelVariantenBez] as articleDescription
,[ArtikelVariantenAlias] as articleDescriptionAlias
,[LagerAbteilungKurzBez] as location
,[Lfdnr] as runningNumber
,[Produktionslos] as lot
,[ProduktionsDatumMin] as productionDate
,IdPosition
FROM [AlplaPROD_test1].[dbo].[V_LagerPositionenBarcodes] (nolock)
where idlagerabteilung in ([locations]) and [ProduktionsDatumMin] < DATEadd( Hour, -[timeCheck], getdate())
order by [ProduktionsDatumMin] asc
`;
//update the time check
notifyQuery = notifyQuery.replaceAll("[timeCheck]", notifyData.checkTime);
notifyQuery = notifyQuery.replaceAll(
"[locations]",
notifyData.notifiySettings.locations
);
let prod: PPOO[];
try {
prod = await query(notifyQuery, "Label Reprints");
//console.log(labels.length);
// const now = Date.now()
if (prod.length > 0) {
//send the email :D
// update the count with the result
const emailSetup = {
emailTo: notifyData.emails,
subject: `Alert! Pallets in production greater than ${notifyData.checkTime} ${notifyData.timeType}`,
template: "productionCheck",
context: {
items: prod,
count: prod.length,
checkTime: notifyData.checkTime,
timeCheck: notifyData.timeType,
},
};
const sentEmail = await sendEmail(emailSetup);
if (!sentEmail.success) {
createLog(
"error",
"notify",
"notify",
"Failed to send email, will try again on next interval"
);
return;
}
let updateSettings = notifyData.notifiySettings;
const { data, error } = await tryCatch(
db
.update(notifications)
.set({
lastRan: sql`NOW()`,
notifiySettings: {
...updateSettings,
count: prod.length,
prodID: prod[0].IdPosition,
},
})
.where(eq(notifications.name, notifyData.name))
);
} else {
return;
}
} catch (err) {
createLog(
"error",
"sql",
"error",
`Error from running the Label Reprints query: ${err}`
);
}
}

View File

@@ -0,0 +1,158 @@
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sendEmail } from "../sendMail.js";
export interface Blocking {
HumanReadableId?: number;
subject?: string;
}
export default async function qualityBlockingMonitor(notifyData: any) {
createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
if (notifyData.emails === "") {
createLog(
"error",
"notify",
"notify",
`There are no emails set for ${notifyData.name}`
);
return;
}
let blockQuery = `
SELECT
'Alert! new blocking order: #' + cast(HumanReadableId as varchar) + ' - ' + ArticleVariantDescription as subject,
cast([HumanReadableId] as varchar) as blockingNumber,
[ArticleVariantDescription] as article,
cast([CustomerHumanReadableId] as varchar) + ' - ' + [CustomerDescription] as customer,
convert(varchar(10), [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate], 101) + ' - ' + convert(varchar(5), [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate], 108) as blockingDate,
cast(ArticleVariantHumanReadableId as varchar) + ' - ' + ArticleVariantDescription as av,
case when [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark = '' or [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark is NULL then 'Please reach out to quality for the reason this was placed on hold as a remark was not entered during the blocking processs' else [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark end as remark,
cast(FORMAT(TotalAmountOfPieces, '###,###') as varchar) + ' / ' + cast(LoadingUnit as varchar) as peicesAndLoadingUnits,
[test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].ProductionLotHumanReadableId as lotNumber,
cast(IdGlobalBlockingDefectsGroup as varchar) + ' - ' + BD.Description as mainDefectGroup,
cast(IdGlobalBlockingDefect as varchar) + ' - ' + MD.Description as mainDefect,
sent=0,
lot.MachineLocation as line,
HumanReadableId
FROM [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder] (nolock)
/*** Join 1.0 table to get correct id info to link ***/
join
[AlplaPROD_test1].[dbo].[T_BlockingOrders] (nolock) AS BO
on [HumanReadableId] = BO.[IdBlockingOrder]
/*** Get the main defect info ***/
Inner join
[AlplaPROD_test1].[dbo].[T_BlockingDefectsGroups] (nolock) as BD
ON BO.IdMainDefectGroup = BD.IdBlockingDefectsGroup
INNER join
[AlplaPROD_test1].[dbo].[T_BlockingDefects] as MD
ON BO.IdMainDefect = MD.IdBlockingDefect
/*** get lot info ***/
left join
(SELECT [MachineLocation]
,[MachineDescription]
,[ProductionLotHumanReadableId]
FROM [test1_AlplaPROD2.0_Reporting].[reporting_productionControlling].[ProducedLot]) as lot
on [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].ProductionLotHumanReadableId = lot.ProductionLotHumanReadableId
where [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate] between getdate() - 1 and getdate() + 1
and [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].BlockingTrigger = 1
and HumanReadableId NOT IN ([sentBlockingOrders])
`;
//add the blocking orders in.
blockQuery = blockQuery.replaceAll(
"[sentBlockingOrders]",
notifyData.sentBlocking[0].sentBlockingOrders
);
let blocking: any;
try {
blocking = await query(blockQuery, "Quality Blocking");
//console.log(labels.length);
// const now = Date.now()
//console.log(blocking);
// console.log(blocking[0].blockingNumber > data.prodID);
if (
blocking.length > 0 &&
blocking[0].HumanReadableId > notifyData.notifiySettings.prodID
) {
//send the email :D
const emailSetup = {
emailTo: notifyData.emails,
subject:
blocking.length > 0
? `Alert! New blocking orders.`
: blocking[0].subject,
template: "qualityBlocking",
context: {
items: blocking,
},
};
const sentEmail = await sendEmail(emailSetup);
if (!sentEmail.success) {
createLog(
"error",
"nofity",
"notify",
"Failed to send email, will try again on next interval"
);
return;
}
// add the new blocking order to this
const newBlockingOrders = blocking.map(
(b: any) => b.HumanReadableId
);
//console.log(newBlockingOrders);
//console.log(sentBlocking[0].sentBlockingOrders);
// Ensure no duplicates
const uniqueOrders = Array.from(
new Set([
...notifyData.sentBlocking[0].sentBlockingOrders,
...newBlockingOrders,
])
);
// Update sentBlockingOrders
notifyData.sentBlocking[0].sentBlockingOrders = uniqueOrders;
//console.log(notifUpdate);
const { data, error } = await tryCatch(
db
.update(notifications)
.set({
lastRan: sql`NOW()`,
notifiySettings: {
...notifyData.notifiySettings,
prodID: blocking[0].HumanReadableId,
sentBlockingOrders: uniqueOrders,
},
})
.where(eq(notifications.name, notifyData.name))
);
}
} catch (err) {
createLog(
"error",
"notify",
"notify",
`Error from running the blocking query: ${err}`
);
}
}

View File

@@ -0,0 +1,118 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sendEmail } from "../sendMail.js";
export interface Labels {
IdEtikettenHistorie?: number;
}
const notification = async (notifyData: any) => {
/**
* Pass the entire notification over
*/
createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
// validate if there are any emails.
if (notifyData.emails === "") {
createLog(
"error",
"notify",
"notify",
`There are no emails set for ${notifyData.name}`
);
return;
}
// well set a backup default time here
let timeCheck = `DATEADD(SECOND, -30, getdate()) `;
// set the time of getting the label
if (notifyData.timeType === "sec") {
timeCheck = `DATEADD(SECOND, -${notifyData.checkTime}, getdate()) `;
} else if (notifyData.timeType === "min") {
timeCheck = `DATEADD(MINUTE, -${notifyData.checkTime}, getdate()) `;
}
let reprintQuery = `
SELECT
IdArtikelvarianten as av,
ArtikelVariantenBez as alias,
LfdNr as runningNumber,
CONVERT(VARCHAR, CAST(Add_Date AS DATETIME), 100) Add_Date,
Add_User,
CONVERT(VARCHAR, CAST(Upd_Date AS DATETIME), 100) Upd_Date,
Upd_User,
EtikettenDruckerBezeichnung as printer,
AnzahlGedruckterKopien as totalPrinted
FROM Alplaprod_test1.dbo.V_EtikettenGedruckt (nolock)
where AnzahlGedruckterKopien > 2
and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108)
and Upd_Date > DATEADD(SECOND, -30, getdate())
and VpkVorschriftBez not like '%$%'
`;
//update the time check
reprintQuery = reprintQuery.replaceAll(
"DATEADD(SECOND, -30, getdate()) ",
timeCheck
);
//let labels: Labels[];
const { data: labels, error: labelError } = await tryCatch(
query(reprintQuery, "Label Reprints")
);
if (labels.length > 0) {
//send the email :D
const emailSetup = {
emailTo: notifyData.emails,
subject: "Alert! Label Reprinted",
template: "reprintLabels",
context: {
items: labels,
},
};
const sentEmail = await sendEmail(emailSetup);
if (!sentEmail.success) {
createLog(
"error",
"notify",
"notify",
"Failed to send email, will try again on next interval"
);
return;
}
// // update the last time we ran and the prod id
// const notifUpdate = {
// prodID: labels[0].IdEtikettenHistorie,
// lastRan: nowDate(),
// };
// update the last time ran
const updateSettings = notifyData.notifiySettings;
const { data, error } = await tryCatch(
db
.update(notifications)
.set({
lastRan: sql`NOW()`,
notifiySettings: {
...updateSettings,
prodID: labels[0].IdEtikettenHistorie,
},
})
.where(eq(notifications.name, notifyData.name))
);
} else {
return;
}
};
export default notification;

View File

@@ -0,0 +1,129 @@
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
import { isWeekend } from "date-fns";
import { createLog } from "../../../logger/logger.js";
import { sendEmail } from "../sendMail.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
export interface PPOO {
IdPosition?: number;
}
export default async function reprintLabelMonitor(notifyData: any) {
createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
if (notifyData.emails === "") {
createLog(
"error",
"notify",
"notify",
`There are no emails set for ${notifyData.notificationName}`
);
return;
}
// as this one goes to managers we want to not send on the weekends
const weekend = isWeekend(new Date(Date.now()));
if (weekend && notifyData.notifiySettings.weekend) {
createLog(
"info",
"notify",
"notify",
`${notifyData.name} will not run on the weekends`
);
return;
}
let noteQuery = `
SELECT
--[EinlagerungsDatummin] as lastMovingDate,
round(VerfuegbareMengeVPKSum,2) as pallets
,VerfuegbareMengeSum as total
,round([GesperrteMengeVpkSum],2) as held
,round([GesperrteMengeSum],2) as heldQty
,[IdArtikelVarianten] as av
,[IdProdBereich] as pfcID
,[ArtikelVariantenBez] as articleDescription
,[ArtikelVariantenAlias] as articleDescriptionAlias
,[LagerAbteilungKurzBez] as location
,[Lfdnr] as runningNumber
,[Produktionslos] as lot
,[ProduktionsDatumMin] as productionDate
,IdPosition
FROM [AlplaPROD_test1].[dbo].[V_LagerPositionenBarcodes] (nolock)
where idlagerabteilung in ([locations]) and [ProduktionsDatumMin] < DATEadd( Hour, -[timeCheck], getdate())
order by [ProduktionsDatumMin] asc
`;
//update the time check
noteQuery = noteQuery
.replaceAll("[timeCheck]", notifyData.checkTime)
.replaceAll("[locations]", notifyData.notifiySettings.locations);
let stage: PPOO[];
try {
stage = await query(noteQuery, "Staging checks");
//console.log(labels.length);
// const now = Date.now()
if (stage.length > 0) {
//send the email :D
// update the count with the result
const emailSetup = {
emailTo: notifyData.emails,
subject: `Alert! Pallets in staging greater than ${notifyData.checkTime} ${notifyData.timeType}`,
template: "stagingCheck",
context: {
items: stage,
count: stage.length,
checkTime: notifyData.checkTime,
timeCheck: notifyData.timeType,
},
};
const sentEmail = await sendEmail(emailSetup);
if (!sentEmail.success) {
createLog(
"error",
"notify",
"notify",
"Failed to send email, will try again on next interval"
);
return;
}
// update the last time we ran and the prod id
let updateSettings = notifyData.notifiySettings;
const { data, error } = await tryCatch(
db
.update(notifications)
.set({
lastRan: sql`NOW()`,
notifiySettings: {
...updateSettings,
count: stage.length,
},
})
.where(eq(notifications.name, notifyData.name))
);
} else {
return;
}
} catch (err) {
createLog(
"error",
"notify",
"notify",
`Error from running the Label Reprints query: ${err}`
);
}
}

View File

@@ -0,0 +1,201 @@
export let xmlPayloadTI = `
<service-request>
<service-id>ImportWeb</service-id>
<request-id>[requestID]</request-id>
<data>
<WebImport>
[WebImportHeader]
<WebImportFile>
<MercuryGate>
<Header>
<SenderID/>
<ReceiverID/>
<DocTypeID>MasterBillOfLading</DocTypeID>
<DocCount>1</DocCount>
</Header>
<Load action="UpdateOrAdd">
<Enterprise name="" customerAcctNum="[customerAccountNum]"/>
<AssignedTo/>
<ReferenceNumbers>
<ReferenceNumber type="Load Number" isPrimary="true">[loadNumber]</ReferenceNumber>
</ReferenceNumbers>
<Payment>
<Method>Prepaid</Method>
<BillTo thirdParty="False">
<Address Type="BillTo" isResidential="False">
<Alias/>
<Name>ALPLA</Name>
<AddrLine1>CO TRANSPORTATION INSIGHT</AddrLine1>
<AddrLine2>PO BOX 23000</AddrLine2>
<City>HICKORY</City>
<StateProvince>NC</StateProvince>
<PostalCode>28603</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts/>
</Address>
</BillTo>
</Payment>
<PriceSheets>
<PriceSheet type="Carrier" isSelected="false"> // get this from the price sheet
<ContractId/>
<SCAC/>
<Mode/>
</PriceSheet>
</PriceSheets>
<Plan>
<Events count="2">
<Event type="Pickup" sequenceNum="1">
<Dates>
<Date type="earliest">[loadingDate]</Date>
<Date type="latest">[deliveryDate]</Date>
</Dates>
<Address type="" isResidential="" isPrimary="false">
<LocationCode/>
<Name>[plantName]</Name>
<AddrLine1>[plantStreetAddress]</AddrLine1>
<AddrLine2/>
<City>[plantCity]</City>
<StateProvince>[plantState]</StateProvince>
<PostalCode>[plantZipCode]</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts>
<Contact type="">
<Name/>
<ContactMethods>
<ContactMethod sequenceNum="1" type="phone">[contactNum]</ContactMethod>
<ContactMethod sequenceNum="1" type="email">[contactEmail]</ContactMethod>
</ContactMethods>
</Contact>
</Contacts>
</Address>
<Shipments>
<ReferenceNumbers>
<ReferenceNumber type="Shipment Number" isPrimary="true">[shipNumber]</ReferenceNumber>
</ReferenceNumbers>
</Shipments>
</Event>
<Event type="Drop" sequenceNum="2">
<Dates>
<Date type="earliest">[loadingDate]</Date>
<Date type="latest">[deliveryDate]</Date>
</Dates>
<Address type="" isResidential="" isPrimary="false">
<LocationCode/>
<Name>[customerName]</Name>
<AddrLine1>[customerStreetAddress]</AddrLine1>
<AddrLine2/>
<City>[customerCity]</City>
<StateProvince>[customerState]</StateProvince>
<PostalCode>[customerZip]</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts>
<Contact type="">
<Name/>
<ContactMethods>
<ContactMethod sequenceNum="1" type="phone">800-555-1122</ContactMethod>
</ContactMethods>
</Contact>
</Contacts>
</Address>
<Shipments>
<ReferenceNumbers>
<ReferenceNumber type="Shipment Number" isPrimary="true">[shipNumber]</ReferenceNumber>
</ReferenceNumbers>
</Shipments>
</Event>
</Events>
</Plan>
<Shipments>
<Shipment type="Regular" action="UpdateOrAdd">
<Status>Pending</Status>
<Enterprise name="" customerAcctNum="[customerAccountNum]"/>
<ReferenceNumbers>
<ReferenceNumber type="Shipment Number" isPrimary="true">[shipNumber]</ReferenceNumber>
<ReferenceNumber type="PO Number" isPrimary="false">[customerPO]</ReferenceNumber>
[multieReleaseNumber]
<ReferenceNumber type="Store Number" isPrimary="false">[glCoding]</ReferenceNumber>
<ReferenceNumber type="Profit Center" isPrimary="false">[pfc]</ReferenceNumber>
</ReferenceNumbers>
<Services/>
<EquipmentList/>
6
<Dates>
<Pickup>
<Date type="earliest">[loadingDate]</Date>
<Date type="latest">[loadingDate]</Date>
</Pickup>
<Drop>
<Date type="earliest">[deliveryDate]</Date>
<Date type="latest">[deliveryDate]</Date>
</Drop>
</Dates>
<PriceSheets>
<PriceSheet type="Carrier" isSelected="false">
<ContractId/>
<SCAC/>
<Mode/>
</PriceSheet>
</PriceSheets>
<Shipper>
<Address type="" isResidential="" isPrimary="false">
<LocationCode/>
<Name>[plantName]</Name>
<AddrLine1>[plantStreetAddress]</AddrLine1>
<AddrLine2/>
<City>[plantCity]</City>
<StateProvince>[plantState]</StateProvince>
<PostalCode>[plantZipCode]</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts>
<Contact type="">
<Name/>
<ContactMethods>
<ContactMethod sequenceNum="1" type="phone">[contactNum]</ContactMethod>
</ContactMethods>
</Contact>
</Contacts>
</Address>
</Shipper>
<Consignee>
<Address type="" isResidential="" isPrimary="false">
<LocationCode/>
<Name>[customer]</Name>
<AddrLine1>[customerStreetAddress]</AddrLine1>
<AddrLine2/>
<City>[customerCity]</City>
<StateProvince>[customerState]</StateProvince>
<PostalCode>[customerZip]</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts />
</Address>
</Consignee>
<ItemGroups>
[items]
</ItemGroups>
<Payment>
<Method>Prepaid</Method>
<BillTo thirdParty="False">
<Address Type="BillTo" isResidential="False">
<Alias/>
<Name>ALPLA</Name>
<AddrLine1>CO TRANSPORTATION INSIGHT</AddrLine1>
<AddrLine2>PO BOX 23000</AddrLine2>
<City>HICKORY</City>
<StateProvince>NC</StateProvince>
<PostalCode>28603</PostalCode>
<CountryCode>USA</CountryCode>
<Contacts/>
</Address>
</BillTo>
</Payment>
</Shipment>
</Shipments>
</Load>
</MercuryGate>
</WebImportFile>
</WebImport>
</data>
</service-request>
`;

View File

@@ -0,0 +1,433 @@
import { xmlPayloadTI } from "./tiFullFlow/tiXmlPayload.js";
import axios from "axios";
import querystring from "querystring";
import { getOrderToSend } from "../../../sqlServer/querys/notifications/ti/getOrderToSend.js";
import { getHeaders } from "../../../sqlServer/querys/notifications/ti/getHeaders.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { db } from "../../../../../database/dbclient.js";
import { settings } from "../../../../../database/schema/settings.js";
import { serverData } from "../../../../../database/schema/serverData.js";
import { eq, sql } from "drizzle-orm";
import { notifications } from "../../../../../database/schema/notifications.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { createLog } from "../../../logger/logger.js";
import { freightClass } from "../../../../globalUtils/freightClass.js";
import { delay } from "../../../../globalUtils/delay.js";
const dateCorrection = (newDate: any) => {
return new Date(newDate)
.toLocaleString("en-US", {
timeZone: "UTC",
year: "numeric",
month: "2-digit",
day: "2-digit",
hour: "2-digit",
minute: "2-digit",
second: "2-digit",
hourCycle: "h23", // Ensures 24-hour format
})
.replace(",", "");
};
// Builds and posts one TI (Transportation Insight / MercuryGate) load-import
// XML document for the oldest pending release, then marks that release as
// processed in the notifications settings row.
// Returns { success, code, message } where code 1 means "nothing left to
// send" (the runTiImport loop keys off this) and code 0 means "one order
// done, call again".
// NOTE(review): the two bare `return`s below yield undefined, which the
// caller dereferences (`test.code`) — confirm and harden.
const tiImport = async () => {
//await initializePool();
// get the plant token
const { data: plantData, error: plantError } = await tryCatch(
db.select().from(settings)
);
//await initializePool();
// NOTE(review): bare return — caller receives undefined (see note above).
if (plantError) return;
const plantToken = plantData?.filter((n) => n.name === "plantToken");
// Server/plant row looked up by the plantToken setting.
const { data: plantInfo, error: plantEr } = await tryCatch(
db
.select()
.from(serverData)
.where(eq(serverData.plantToken, plantToken[0].value))
);
// parsing posting window
// NOTE(review): plantEr is captured but never checked, so the non-null
// assertion below can hide a failed query — verify.
const plantI = plantInfo!;
//const postTime = JSON.parse(plantI[0]?.tiPostTime!);
// order notifications
// Settings row for this integration. The "tiIntergration" misspelling is
// load-bearing: it must match the seeded notification name.
const { data: notificationSet, error: notificationSettingsErr } =
await tryCatch(
db
.select()
.from(notifications)
.where(eq(notifications.name, "tiIntergration"))
);
// NOTE(review): bare return — caller receives undefined.
if (notificationSettingsErr) return;
const notiSet: any = notificationSet;
//creds
// NOTE(review): hard-coded TEST credentials committed to source — move to
// config/env before production.
const userid = "ALPLAWSTEST";
const password = "oe39U1LuLX9ZdY0XKobG";
// const requestID = `ALPLAPBTEST1`; // production will be alpla01-dateTime - this will be the time it was sent over.
const requestUser = "ALPLAWSTEST"; // if alplaprod_rs -- confirm we can use a user name vs the AlplapIMPORT // needs to stay the same as provied
const customerAccountNum = plantI[0].customerTiAcc as string; // ti
// it we dont get anything here we want to make sure we add it in
// get current releaes not in the already sent oders
// Fill the header-query template: plant token, posting window, and the
// already-processed release numbers to exclude.
// NOTE(review): notiSet is the result ARRAY of the select above, yet
// `.notifiySettings` is read directly off it here (and `[0].processed`
// later) — one of the two shapes must be wrong; confirm against the
// notifications schema.
let orders = getHeaders;
orders = orders
.replaceAll("test1", plantToken[0].value)
.replaceAll("[from]", notiSet?.notifiySettings.start)
.replaceAll("[to]", notiSet?.notifiySettings.end)
.replaceAll(
"[exclude]",
notiSet.notifiySettings.processed
.map((num: any) => `'${num}'`)
.join(", ")
);
//console.log(orders);
let headerPending = [];
try {
headerPending = await query(orders, "Ti get open headers");
} catch (error) {
// NOTE(review): swallowed — falls through to the empty-result branch.
console.log(error);
}
// Nothing pending: report code 1 so the caller's drain loop stops.
if (headerPending.length === 0) {
createLog(
"info",
"notification",
"notify",
"There are no pending orders to be sent over to ti."
);
return {
success: true,
code: 1,
message: "There are no pending orders to be sent over to ti.",
};
}
createLog(
"info",
"notification",
"notify",
`There are a total of ${headerPending.length} to send over`
);
// update query to have the correct plant token
// Only the FIRST pending release is processed per call; the loop in
// runTiImport picks up the rest one at a time.
let orderToSend = getOrderToSend;
orderToSend = orderToSend
.replaceAll("test1", plantToken[0].value)
.replaceAll("[releaseToProcess]", `'${headerPending[0].releaseNumber}'`)
.replaceAll("[from]", notiSet.notifiySettings.start)
.replaceAll("[to]", notiSet.notifiySettings.end);
// console.log(orderToSend);
let records = [];
try {
records = await query(orderToSend, "Ti send order");
} catch (error) {
// NOTE(review): swallowed — records stays empty and records[0] below
// would throw; confirm this path cannot happen or guard it.
console.log(error);
}
//console.log(headerPending.length);
// update the header
let webHeader = `
<request-id>[requestID]</request-id>
<data>
<WebImport>
<WebImportHeader>
<FileName>[requestID].XML</FileName>
<Type>SOTransportLoader</Type>
<UserName>[requestUser]</UserName>
</WebImportHeader>
`;
webHeader = webHeader.replaceAll(
"[requestID]",
`${records[0].releaseNumber}-${plantToken[0].value}`
);
webHeader = webHeader.replaceAll("[requestUser]", requestUser);
// update the special instructions section
const otherSettings = plantI[0]?.otherSettings as {
specialInstructions: string;
active: boolean;
}[];
const specialInfo = otherSettings[0].specialInstructions.replaceAll(
"[header]",
records[0].Header
);
// this part will link into the <ItemGroups></ItemGroups>
// One <ItemGroup> per record. Package dimensions arrive in millimetres and
// are converted to inches (/ 25.4).
// NOTE(review): Height uses Math.round(...).toFixed(2) (always "N.00"),
// unlike Length/Width which keep two real decimals — confirm intentional.
let itemGroups = "";
for (let i = 0; i < records.length; i++) {
let newItem = `
<ItemGroup id="" isShipUnit="false" isHandlingUnit="false" sequence="${
i + 1
}">
<ContainedBy id=""/>
<LineItem lineNumber="${i + 1}"/>
<Dimensions>
<Dimension type="Length" uom="IN">${(
records[i].pkgLengh / 25.4
).toFixed(2)}</Dimension>
<Dimension type="Width" uom="IN">${(
records[i].pkgWidth / 25.4
).toFixed(2)}</Dimension>
<Dimension type="Height" uom="IN">${Math.round(
records[i].pkgHeight / 25.4
).toFixed(2)}</Dimension>
</Dimensions>
<Description>${`av ${records[i].article} ${records[i].articleAlias}`}</Description>
<FreightClasses>
<FreightClass type="">${freightClass(
records[i].pkgWeight,
records[i].pkgLengh,
records[i].pkgWidth,
records[i].pkgHeight
)}</FreightClass>
</FreightClasses>
<Commodity/>
<NmfcCode/>
<HazardousMaterial>false</HazardousMaterial>
<HazMatDetail/>
<Weights>
<Weight type="actual" uom="KG">${
records[i].pkgWeight * records[i].Pallets
}</Weight>
</Weights>
<Quantities>
<Quantity type="actual" uom="pallet">${
records[i].Pallets
}</Quantity>
</Quantities>
</ItemGroup>
`;
itemGroups += newItem;
}
// add the full amount of pallets sending over
let fullPalToSend = records.reduce(
(acc: any, o: any) => acc + o.Pallets,
0
);
// rebuild the xml to be properly
// Substitute every placeholder in the master payload template.
// NOTE(review): fullPalToSend is a number handed to replaceAll — relies on
// implicit string coercion through the `any` typing.
let payload = xmlPayloadTI;
payload = payload
.replaceAll(`[WebImportHeader]`, webHeader)
.replaceAll(`[items]`, itemGroups)
.replaceAll(`[customerAccountNum]`, customerAccountNum)
.replaceAll("[fullTotalPal]", fullPalToSend);
// update the main release
//[loadNumber],[shipNumber]
payload = payload.replaceAll(`[shipNumber]`, records[0].releaseNumber);
payload = payload.replaceAll(`[loadNumber]`, records[0].releaseNumber);
// do the multie release if needed
// <ReferenceNumber type="Release Number" isPrimary="false">[multieReleaseNumber]</ReferenceNumber>
// NOTE(review): records.length > 0 is always true here (records[0] was
// already dereferenced above), so the else branch is dead.
let multiRelease = ``;
if (records.length > 0) {
for (let i = 0; i < records.length; i++) {
const newRelease = `
<ReferenceNumber type="Release Number" isPrimary="false">${records[i].releaseNumber}</ReferenceNumber>`;
multiRelease += newRelease;
}
payload = payload.replaceAll("[multieReleaseNumber]", multiRelease);
} else {
payload = payload.replaceAll("[multieReleaseNumber]", "");
}
//update the delivery section
payload = payload.replaceAll(
"[loadingDate]",
dateCorrection(records[0].LoadingDate)
);
payload = payload.replaceAll(
"[deliveryDate]",
dateCorrection(records[0].DeliveryDate)
);
// shipping hours
//<Date type="earliest">[shippingHoursEarly]</Date>
//<Date type="latest">[shippingHoursLate]</Date>
// update teh shipping hours
// NOTE(review): `now` is unused; LoadingDate is assumed to be a Date
// object here (toLocaleDateString) — confirm against the SQL driver.
const now = new Date();
const formattedDate = records[0].LoadingDate.toLocaleDateString("en-US", {
month: "2-digit",
day: "2-digit",
year: "numeric",
});
const shippingHours = JSON.parse(plantI[0]?.shippingHours!);
//console.log(shippingHours);
payload = payload
.replaceAll(
"[shippingHoursEarly]",
`${formattedDate} ${shippingHours[0].early}`
)
.replaceAll(
"[shippingHoursLate]",
`${formattedDate} ${shippingHours[0].late}`
);
// Plant (origin) and customer (destination) address placeholders.
payload = payload
.replaceAll("[plantName]", `Alpla ${plantI[0]?.sName!}`)
.replaceAll("[plantStreetAddress]", plantI[0]?.streetAddress!)
.replaceAll("[plantCity]", plantI[0]?.cityState!.split(",")[0])
.replaceAll("[plantState]", plantI[0]?.cityState!.split(",")[1])
.replaceAll("[plantZipCode]", plantI[0]?.zipcode!)
.replaceAll("[contactNum]", plantI[0]?.contactPhone!)
.replaceAll("[contactEmail]", plantI[0]?.contactEmail!)
// customer info
.replaceAll("[customerName]", records[0].addressAlias)
.replaceAll("[customerStreetAddress]", records[0].streetAddress)
.replaceAll("[customerCity]", records[0].city.split(",")[0])
.replaceAll("[customerState]", records[0].city.split(",")[1])
.replaceAll("[customerZip]", records[0].zipCode)
.replaceAll("[customerPO]", records[0].Header)
.replaceAll(
"[glCoding]",
`52410-${
records[0].artileType.toLowerCase() === "preform" ||
records[0].artileType.toLowerCase() === "metalCage"
? 31
: plantI[0].greatPlainsPlantCode
}`
) // {"52410 - " + (artileType.toLowerCase() === "preform" || artileType.toLowerCase() === "metalCage" ? 31: plantInfo[0].greatPlainsPlantCode)}
.replaceAll(
"[pfc]",
`${
records[0].artileType.toLowerCase() === "preform" ||
records[0].artileType.toLowerCase() === "metalCage"
? 40
: records[0].costCenter
}`
);
// special instructions
// NOTE(review): when specialInstructions is empty the
// [specialInstructions] placeholder is left verbatim in the XML — confirm
// TI tolerates that.
if (otherSettings[0].specialInstructions.length != 0) {
payload = payload.replaceAll("[specialInstructions]", specialInfo);
}
// update the carrier info if any is needed.
// check the address has a real carrier on it and change to true and put the sacs code in
// NOTE(review): hasCarrier is never read.
const hasCarrier = true;
console.log(
`Checking if ${records[0].addressAlias} has scac: ${
records[0].remark.split(",")[0] ? "there was one" : "no scac"
}`
);
// remark format appears to be "scac:XXXX,..." — the SCAC is the piece
// after the first ":" of the first comma-separated token.
const priceSheet = `
<PriceSheets>
<PriceSheet type="Carrier" isSelected="${
records[0].remark.split(",")[0] ? "true" : "false"
}">
<ContractId/>
${
records[0].remark.split(",")[0]
? `<SCAC>${records[0].remark
.split(",")[0]
.split(":")[1]
.toUpperCase()}</SCAC>`
: `<SCAC/>`
}
<Mode/>
</PriceSheet>
</PriceSheets>
`;
payload = payload.replaceAll("[priceSheet]", priceSheet);
// console.log(payload);
//await closePool();
//put the xml into a form
const formBody = querystring.stringify({
userid,
password,
request: payload,
});
// NOTE(review): fire-and-forget POST — the release is marked processed
// below even when this request fails; confirm that is acceptable.
axios
.post(
"https://t-insightws.mercurygate.net/MercuryGate/common/remoteService.jsp",
formBody,
{
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
}
)
.then((response) => {
//console.log(response.data)
console.log("Data was sent over to TI");
})
.catch((error) => console.error(error));
// console.log(payload);
// the order is done so we want to update the processed.
// add the new processed order to this
// NOTE(review): notifiySettings is indexed as an array here but spread as
// an object below ({...notiSettingArray, prodID: 1}) — the persisted shape
// cannot be both; confirm against the schema.
let notiSettingArray = notiSet.notifiySettings;
if (
!notiSettingArray[0].processed.includes(headerPending[0].releaseNumber)
) {
notiSettingArray[0].processed.push(headerPending[0].releaseNumber);
}
// NOTE(review): data/error from this update are never checked.
const { data, error } = await tryCatch(
db
.update(notifications)
.set({
lastRan: sql`NOW()`,
notifiySettings: {
...notiSettingArray,
prodID: 1,
},
})
.where(eq(notifications.name, "tiIntergration"))
);
createLog("info", "ti", "notify", "done with this order");
return { success: true, code: 0, message: "done with this order" };
};
// add a running check so we cant flag it twice
export let tiExportRunning = false;

/**
 * Drains every pending TI order by calling tiImport() in a loop until it
 * reports code 1 ("nothing left to send"), pausing 5s between iterations.
 *
 * Fixes over the original:
 * - tiImport returns undefined on its early error exits; the old loop then
 *   threw on `test.code` and left tiExportRunning stuck at true. We now
 *   treat a missing result as "stop".
 * - try/finally guarantees the running flag is cleared even if an
 *   iteration throws.
 */
export const runTiImport = async () => {
  tiExportRunning = true;
  try {
    let finished = false;
    do {
      createLog("info", "ti", "notify", "processing new data");
      const result = await tiImport();
      if (!result) {
        // tiImport bailed out (settings/notification query failed); there is
        // nothing sensible to keep looping on.
        createLog(
          "error",
          "ti",
          "notify",
          "tiImport exited without a result; stopping the export loop."
        );
        break;
      }
      createLog(
        "info",
        "ti",
        "notify",
        `Still more to process? ${result.code === 1 ? "No" : "Yes"}`
      );
      if (result.code === 1) {
        finished = true;
      }
      await delay(1000 * 5);
    } while (!finished);
  } finally {
    tiExportRunning = false;
  }
};
export default tiImport;

View File

@@ -0,0 +1,149 @@
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { db } from "../../../../database/dbclient.js";
import { settings } from "../../../../database/schema/settings.js";
import nodemailer from "nodemailer";
import type { Transporter } from "nodemailer";
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
import type Mail from "nodemailer/lib/mailer/index.js";
import type { Address } from "nodemailer/lib/mailer/index.js";
import path from "path";
import { fileURLToPath } from "url";
import hbs from "nodemailer-express-handlebars";
import { promisify } from "util";
import { createLog } from "../../logger/logger.js";
import { installed } from "../../../index.js";
// Standard nodemailer options plus the two fields the
// nodemailer-express-handlebars plugin reads at send time.
interface HandlebarsMailOptions extends Mail.Options {
  template: string; // template file name (no extension) under the views dir
  context: Record<string, unknown>; // Use a generic object for context
}
// NOTE(review): declared but never referenced in this file — sendEmail takes
// `data: any`. Either type that parameter with this interface or drop it.
interface EmailData {
  email: string;
  subject: string;
  template: string;
  context: []; // NOTE(review): `[]` is the empty-tuple type — presumably meant Record<string, unknown>; confirm
}
// Renders a Handlebars template and emails it. Expected data shape:
// { email, subject, template, context }. Picks Gmail (local dev with env
// creds) or the plant SMTP relay, then sends.
// Returns { success, message, ... }; NOTE(review): returns undefined on the
// not-installed path while every other path returns an object — confirm
// callers tolerate that.
export const sendEmail = async (data: any): Promise<any> => {
if (!installed) {
createLog("error", "notify", "notify", "server not installed.");
return;
}
let transporter: Transporter;
let fromEmail: string | Address;
const { data: settingData, error: settingError } = await tryCatch(
db.select().from(settings)
);
if (settingError) {
return {
success: false,
message: "There was an error getting the settings.",
settingError,
};
}
// get the plantToken
const server = settingData.filter((n) => n.name === "server");
// Local development: fall back to Gmail using credentials from the env.
if (
server[0].value === "localhost" &&
process.env.EMAIL_USER &&
process.env.EMAIL_PASSWORD
) {
transporter = nodemailer.createTransport({
service: "gmail",
auth: {
user: process.env.EMAIL_USER,
pass: process.env.EMAIL_PASSWORD,
},
//debug: true,
});
// update the from email
fromEmail = process.env.EMAIL_USER;
} else {
// convert to the correct plant token.
const plantToken = settingData.filter((s) => s.name === "plantToken");
// Relay naming convention: <token>-smtp.alpla.net, with overrides below.
let host = `${plantToken[0].value}-smtp.alpla.net`;
const testServers = ["test1", "test2", "test3"];
if (testServers.includes(plantToken[0].value)) {
host = "USMCD1-smtp.alpla.net";
}
// usiow2 shares the usiow1 relay.
if (plantToken[0].value === "usiow2") {
host = "USIOW1-smtp.alpla.net";
}
// NOTE(review): nodemailer expects rejectUnauthorized inside tls:{...};
// at the top level (hidden by the cast) it is ignored — confirm intent.
transporter = nodemailer.createTransport({
host: host,
port: 25,
rejectUnauthorized: false,
//secure: false,
// auth: {
// user: "alplaprod",
// pass: "obelix",
// },
debug: true,
} as SMTPTransport.Options);
// update the from email
fromEmail = `noreply@alpla.com`;
}
// creating the handlbar options
// Templates live in ../utils/views/ relative to this compiled file.
const viewPath = path.resolve(
path.dirname(fileURLToPath(import.meta.url)),
"../utils/views/"
);
const handlebarOptions = {
viewEngine: {
extname: ".hbs",
//layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory
defaultLayout: "", // Specify the default layout
partialsDir: viewPath,
},
viewPath: viewPath,
extName: ".hbs", // File extension for Handlebars templates
};
transporter.use("compile", hbs(handlebarOptions));
const mailOptions: HandlebarsMailOptions = {
from: fromEmail,
to: data.email,
subject: data.subject,
//text: "You will have a reset token here and only have 30min to click the link before it expires.",
//html: emailTemplate("BlakesTest", "This is an example with css"),
template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs')
context: data.context,
};
// now verify and send the email
// NOTE(review): transporter.sendMail already returns a promise when called
// without a callback; the promisify wrapper is redundant — confirm removal.
const sendMailPromise = promisify(transporter.sendMail).bind(transporter);
try {
// Send email and await the result
const info = await sendMailPromise(mailOptions);
createLog(
"info",
"notification",
"system",
`Email was sent to: ${data.email}`
);
return { success: true, message: "Email sent.", data: info };
} catch (err) {
console.log(err);
createLog(
"error",
"notification",
"system",
`Error sending Email: ${JSON.stringify(err)}`
);
return { success: false, message: "Error sending email.", error: err };
}
};

View File

@@ -0,0 +1,61 @@
import { OpenAPIHono } from "@hono/zod-openapi";
import sendemail from "./routes/sendMail.js";
import { tryCatch } from "../../globalUtils/tryCatch.js";
import { db } from "../../../database/dbclient.js";
import { notifications } from "../../../database/schema/notifications.js";
import { createLog } from "../logger/logger.js";
import { note, notificationCreate } from "./utils/masterNotifications.js";
import { startNotificationMonitor } from "./utils/processNotifications.js";
import notifyStats from "./routes/getActiveNotifications.js";
// Hono sub-app that owns every /notify route, plus one-time startup work:
// seed missing notification rows and start the cron monitor.
const app = new OpenAPIHono();
const routes = [sendemail, notifyStats] as const;
// Mount each route under /notify. (The old `const appRoutes = routes.forEach`
// bound the void return of forEach to an unused variable.)
routes.forEach((route) => {
  app.route("/notify", route);
});
// Catch-all for unknown /notify paths.
app.all("/notify/*", (c) => {
  return c.json({
    success: false,
    message: "you have encountered a notification route that does not exist.",
  });
});
// check if the mastNotications is changed compared to the db and add if needed.
const { data: notes, error: notesError } = await tryCatch(
  db.select().from(notifications)
);
if (notesError) {
  createLog(
    "error",
    "notify",
    "notify",
    `There was an error getting the notifications: ${JSON.stringify(
      notesError
    )}`
  );
}
// A length mismatch means the master list has entries the DB lacks (inserts
// use onConflictDoNothing, so re-seeding is safe). Note: if the select above
// failed, notes is undefined and we re-seed as a best effort.
if (note.length != notes?.length) {
  notificationCreate();
  createLog("info", "notify", "notify", `New notifications being added.`);
} else {
  createLog(
    "info",
    "notify",
    "notify",
    `There are no new notifications. no need to run the update. reminder all changes happen per server.`
  );
}
// Give the seed a moment to land before the monitor starts scheduling.
setTimeout(() => {
  startNotificationMonitor();
}, 5 * 1000);
export default app;

View File

@@ -0,0 +1,29 @@
// an external way to creating logs
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { getAllJobs } from "../utils/processNotifications.js";
const app = new OpenAPIHono({ strict: false });
// GET /activenotifications — snapshot of every cron job the notification
// monitor currently has scheduled.
const activeNotificationsRoute = createRoute({
  tags: ["server"],
  summary: "Returns current active notifications.",
  method: "get",
  path: "/activenotifications",
  //middleware: authMiddleware,
  responses: responses(),
});
app.openapi(activeNotificationsRoute, async (c) => {
  const jobs = getAllJobs();
  const message =
    jobs.length === 0
      ? "There are no active Notifications Currently."
      : "Current Active notifications";
  return c.json({ success: true, message, data: jobs });
});
export default app;

View File

@@ -0,0 +1,22 @@
// import {Router} from "express";
// import {tiExportRunning, runTiImport} from "../../notification/notification/tiFullFlow/tiImports.js";
// const router = Router();
// router.get("/tiTrigger", async (req, res): Promise<void> => {
// if (tiExportRunning) {
// res.status(200).json({
// success: false,
// message: "There is already a current session of the Export running, please try again later.",
// });
// }
// // trigger the import
// runTiImport();
// res.status(200).json({
// success: true,
// message: "The Ti Export has been manually started and will continue to run in the background.",
// });
// });
// export default router;

View File

@@ -0,0 +1,73 @@
// an external way to creating logs
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { sendEmail } from "../controller/sendMail.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
const app = new OpenAPIHono({ strict: false });
// Request body for POST /sendmail.
const EmailSchema = z
  .object({
    email: z.string().email().openapi({ example: "smith@example.com" }),
    subject: z.string().openapi({ example: "Welcome to LST" }),
    template: z.string().openapi({ example: "exampleTemplate" }),
    // Template variables handed straight to the handlebars renderer.
    context: z
      .object({
        name: z.string().optional(),
        score: z.string().optional(),
      })
      .optional()
      .openapi({}),
  })
  .openapi("User");
app.openapi(
  createRoute({
    tags: ["server"],
    // Fixed: the old summary was copy-pasted from an unrelated route
    // ("Returns current active lots that are tech released").
    summary: "Sends an email rendered from a Handlebars template.",
    method: "post",
    path: "/sendmail",
    middleware: authMiddleware,
    request: {
      body: {
        content: {
          "application/json": { schema: EmailSchema },
        },
      },
    },
    responses: responses(),
  }),
  async (c) => {
    // Malformed JSON body is a client error.
    const { data: bodyData, error: bodyError } = await tryCatch(
      c.req.json()
    );
    if (bodyError) {
      return c.json(
        {
          success: false,
          message: "There was an error sending the email",
          data: bodyError,
        },
        400
      );
    }
    const { data: emailData, error: emailError } = await tryCatch(
      sendEmail(bodyData)
    );
    if (emailError) {
      // Transport/render failure is a server-side error; the old code
      // returned it with a 200 status.
      return c.json(
        {
          success: false,
          message: "There was an error sending the email",
          data: emailError,
        },
        500
      );
    }
    return c.json({
      success: emailData.success,
      message: emailData.message,
      data: emailData.data,
    });
  }
);
export default app;

View File

@@ -0,0 +1,113 @@
import { db } from "../../../../database/dbclient.js";
import { notifications } from "../../../../database/schema/notifications.js";
import { createLog } from "../../logger/logger.js";
// Master list of every notification this server knows how to run. Rows are
// inserted with onConflictDoNothing, so `name` is the stable identity —
// renaming an entry here creates a NEW row rather than updating the old one.
export const note: any = [
  {
    name: "reprintLabels",
    description:
      "Monitors the labels that are printed and returns a value if one falls within the time frame defined below.",
    checkInterval: 1,
    timeType: "min",
    emails: "",
    active: false,
    notifiySettings: { prodID: 1 },
  },
  {
    name: "downTimeCheck",
    description:
      "Checks for specific downtimes that are greater than 105 min.",
    checkInterval: 30,
    timeType: "min",
    emails: "",
    active: false,
    notifiySettings: { prodID: 1, daysInPast: 5, duration: 105 },
  },
  {
    name: "qualityBlocking",
    description:
      "Checks for new blocking orders that have been entered, recommended to get the most recent order in here before activating.",
    checkInterval: 30,
    timeType: "min",
    emails: "",
    active: false,
    notifiySettings: { prodID: 1, sentBlockingOrders: [1] },
  },
  {
    // TODO(review): "Checks ppoo" looks like a placeholder — spell out what
    // productionCheck actually monitors.
    name: "productionCheck",
    description: "Checks ppoo",
    checkInterval: 2,
    timeType: "hour",
    emails: "",
    active: false,
    notifiySettings: {
      prodID: 1,
      count: 0,
      weekend: false,
      locations: "0",
    },
  },
  {
    name: "stagingCheck",
    description:
      "Checks staging based on locations, locations need to be separated by a comma",
    checkInterval: 2,
    timeType: "hour",
    emails: "",
    active: false,
    notifiySettings: {
      prodID: 1,
      count: 0,
      weekend: false,
      locations: "0",
    },
  },
  {
    // NOTE: the misspelled name is load-bearing — tiImports looks this row
    // up with eq(notifications.name, "tiIntergration"). Do not "fix" it here
    // without migrating the DB and the query together.
    name: "tiIntergration",
    description: "Checks for new releases to be put into ti",
    checkInterval: 2,
    timeType: "hour",
    emails: "",
    active: false,
    notifiySettings: {
      prodID: 1,
      start: 36,
      end: 720,
      releases: [1, 2, 3],
    },
  },
  {
    name: "exampleNotification",
    description: "Checks for new releases to be put into ti",
    checkInterval: 2,
    timeType: "min",
    emails: "",
    active: true,
    notifiySettings: {
      prodID: 1,
      start: 36,
      end: 720,
      releases: [1, 2, 3],
    },
  },
];
/**
 * Seed the notifications table from the `note` master list. Existing rows
 * are left untouched (onConflictDoNothing), so this is safe to re-run.
 *
 * Fixes: dropped the unused `notify` local and corrected the log message —
 * a failure here happens while inserting, not while "getting".
 */
export const notificationCreate = async () => {
  for (let i = 0; i < note.length; i++) {
    try {
      await db.insert(notifications).values(note[i]).onConflictDoNothing();
    } catch (error) {
      createLog(
        "error",
        "notify",
        "notify",
        `There was an error inserting a notification: ${JSON.stringify(
          error
        )}`
      );
    }
  }
};

View File

@@ -0,0 +1,158 @@
import { db } from "../../../../database/dbclient.js";
import { notifications } from "../../../../database/schema/notifications.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { Cron } from "croner";
// Store active timeouts by notification ID
export let runningNotifications: Record<string, Cron> = {};
export const startNotificationMonitor = async () => {
// if restarted or crashed we need to make sure the running notifications is cleared
createLog("info", "notify", "notify", `Notification system is now active.`);
setInterval(async () => {
const { data, error } = await tryCatch(db.select().from(notifications));
if (error) {
createLog(
"error",
"notify",
"notify",
`There was an error getting the notifications: ${JSON.stringify(
error
)}`
);
}
const notes: any = data;
for (const note of notes) {
//if we get deactivated remove it.
if (runningNotifications[note.name] && !note.active) {
createLog(
"info",
"notify",
"notify",
`${note.name} was just deactivated`
);
removeNotification(note.name);
}
// if we are not active, no emails, and already in place just stop.
if (
!note.active ||
note.emails === "" ||
runningNotifications[note.name]
) {
//console.log(`Skipping ${note.name} hes already scheduled`);
continue;
}
let time = `*/30 * * * *`; // default to be every 30 min
if (note.timeType === "min") {
console.log(`Creating the min mark here`);
time = `*/${note.checkInterval} * * * *`;
}
if (note.timeType === "hour") {
console.log(`Creating the hour mark here`);
time = `* */${note.checkInterval} * * *`;
}
createJob(note.name, time, async () => {
try {
const { default: runFun } = await import(
`../controller/notifications/${note.name}.js`
);
await runFun(note);
} catch (error: any) {
createLog(
"error",
"notify",
note.name,
`Error running notification: ${error.message}`
);
}
});
//testParse(runningNotifcations[note.name]);
}
}, 5 * 1000);
};
/**
 * (Re)create the Croner job for a notification. Any existing job under the
 * same id is stopped first so only one schedule per notification is live.
 */
const createJob = (id: string, schedule: string, task: () => Promise<void>) => {
  const existing = runningNotifications[id];
  if (existing) {
    existing.stop(); // Croner uses .stop() instead of .destroy()
  }
  const cronOptions = {
    timezone: "America/Chicago",
    catch: true, // Prevents unhandled rejections
  };
  runningNotifications[id] = new Cron(schedule, cronOptions, task);
};
// Snapshot of one scheduled notification job, as reported by the
// /activenotifications route.
interface JobInfo {
  id: string;
  schedule: string;
  nextRun: Date | null;
  // Fixed: getAllJobs returns lastRun, but the interface never declared it,
  // which is an excess-property error on the contextually typed literal.
  lastRun: Date | null;
  isRunning: boolean;
}
/** Report every job currently registered with the monitor. */
export const getAllJobs = (): JobInfo[] => {
  return Object.entries(runningNotifications).map(([id, job]) => ({
    id,
    schedule: job.getPattern() || "invalid",
    nextRun: job.nextRun() || null,
    lastRun: job.previousRun() || null,
    // Object.entries never yields a missing value, so the old `job ? ...`
    // guard was dead; stopped jobs still report isRunning=false.
    isRunning: !job.isStopped(),
  }));
};
// Stop and forget a scheduled job; a no-op when the id is unknown.
const removeNotification = (id: any) => {
  const job = runningNotifications[id];
  if (!job) return;
  job.stop();
  delete runningNotifications[id];
};
// Halt every scheduled job and reset the registry to empty.
export const stopAllJobs = () => {
  for (const job of Object.values(runningNotifications)) {
    (job as any).stop();
  }
  runningNotifications = {}; // Clear the object
};
/*
// Pause a job
app.post("/api/jobs/:id/pause", (req, res) => {
runningNotifications[req.params.id]?.pause();
res.json({ success: true });
});
// Resume a job
app.post("/api/jobs/:id/resume", (req, res) => {
runningNotifications[req.params.id]?.resume();
res.json({ success: true });
});
// Delete a job
app.delete("/api/jobs/:id", (req, res) => {
runningNotifications[req.params.id]?.stop();
delete runningNotifications[req.params.id];
res.json({ success: true });
});
*/

Some files were not shown because too many files have changed in this diff Show More