Compare commits
163 Commits
dev
...
d27611d035
| Author | SHA1 | Date | |
|---|---|---|---|
| d27611d035 | |||
| f771db6034 | |||
| 4f3b5d75a2 | |||
| a1f62a3e51 | |||
| de0ee3a61c | |||
| b48dd8fa15 | |||
| 3355eb389c | |||
| b2683d0429 | |||
| 7e484a0f90 | |||
| 77abaed60e | |||
| d10770bc49 | |||
| 1fee4b719b | |||
| 1dce3dccdc | |||
| 3babf8a749 | |||
| 29c9f2d1be | |||
| 459b0f287c | |||
| 7535e15337 | |||
| 68dac0dd28 | |||
| f442cedff2 | |||
| 63e005b790 | |||
| 814861e59c | |||
| dd62ceb133 | |||
| e335225807 | |||
| 3e54685a7e | |||
| f481197d6e | |||
| 053c05c1a7 | |||
| 8bdbc4995c | |||
| bc2336e46d | |||
| 544e82c01d | |||
| 44507d41c4 | |||
| ee3026fa7c | |||
| 5c642805b1 | |||
| 0d06dae6de | |||
| 8a639ceaf8 | |||
| 8c6dc5f690 | |||
| 16b39fd386 | |||
| 075bba95ee | |||
| 6ec9f5827c | |||
| 93941723cc | |||
| fb41fb1dd1 | |||
| 6d5bcde88e | |||
| 1156bbd223 | |||
| bfd4aec4eb | |||
| 63b1151cb7 | |||
| a5dee58223 | |||
| 9784072aab | |||
| f9096b54f5 | |||
| 11e5cf4d86 | |||
| a647d05d3b | |||
| 098c477119 | |||
| 3228ad7892 | |||
| c27ad7cf6a | |||
| a48d4bd5af | |||
| 09c0825194 | |||
| 28fbf2c1e4 | |||
| af5dc9266f | |||
| b903c8ee2d | |||
| 035eda9aa8 | |||
| 1f7c33d871 | |||
| d046c4ea41 | |||
| 88258baf9d | |||
| 82acfcc4a9 | |||
| ba3d721940 | |||
| 27d6b6e884 | |||
| 2c8d1fb710 | |||
| b5de6445b3 | |||
| f9f68ce969 | |||
| 0ced135ec3 | |||
| 7b1c6e1361 | |||
| e3ba45ae13 | |||
| ac7859fda3 | |||
| fb31ae79d1 | |||
| ff1dfdde68 | |||
| f0b9bd599a | |||
| f3103d8c1a | |||
| d557728fa2 | |||
| d58cb5286e | |||
| 7d4733896e | |||
| 175c7226ed | |||
| c32547ceb8 | |||
| a01c0566c2 | |||
| ca4106945b | |||
| 1386e0f00f | |||
| 290f20b86b | |||
| 52171c87fc | |||
| c474536992 | |||
| 600a989226 | |||
| ea7801fccf | |||
| c03b61f48a | |||
| ca552d5587 | |||
| 278c5538bc | |||
| f39ae0f590 | |||
| b4e0f4c666 | |||
| db66302415 | |||
| 90e9bb0ff6 | |||
| bd11feb136 | |||
| 96e7f742fe | |||
| eb051d51f2 | |||
| 7a1a4773e7 | |||
| 878e650e62 | |||
| a31e7ea163 | |||
| 04aa943920 | |||
| af076b8e27 | |||
| c0a0589b3c | |||
| 509ef84726 | |||
| 5ab813f378 | |||
| 5d61eb879e | |||
| 2d4b1db5f4 | |||
| 58f7b4322d | |||
| 3b5e82fdc1 | |||
| f9cd3fb881 | |||
| 51267f5202 | |||
| ceaa25f31e | |||
| 416254353c | |||
| 7f946c095b | |||
| c2aa69ab0a | |||
| 250988800e | |||
| cacfd6d2e0 | |||
| 8d2721b0c2 | |||
| 36cdc7b4bf | |||
| 4d4d6fc7a4 | |||
| 030f9f9aac | |||
| 9e9a56cbb1 | |||
| b01980e1c5 | |||
| fe0c500dcf | |||
| 8a040d15db | |||
| f90066c090 | |||
| 04eb2e3e14 | |||
| 90fb0d364d | |||
| e6e1cecce3 | |||
| 73aa95a693 | |||
| b9f19095cb | |||
| dcb56d4274 | |||
| bc1821132e | |||
| 2551d6c907 | |||
| adf0880659 | |||
| 5149de3199 | |||
| c71b514d9a | |||
| 9254e52808 | |||
| b8c028a6c1 | |||
| 529e922485 | |||
| 5201012235 | |||
| abe53b8f5d | |||
| 836f3e294b | |||
| 96abef762b | |||
| 2c227f9428 | |||
| 46647687dc | |||
| cb01ef1af1 | |||
| b3b5fcec65 | |||
| 3a4dc47a36 | |||
| 63177b523e | |||
| e1cad027d2 | |||
| c1cc355f4f | |||
| 5ed67f3fc0 | |||
| 57e82d2360 | |||
| 9395ec6cd4 | |||
| 0475bb30f9 | |||
| 6843368c36 | |||
| 335ea2deca | |||
| 96814c1115 | |||
| 6dd9ed848b | |||
| 0c5fc1dfb0 | |||
| 5886bea85d |
4
.vscode/settings.json
vendored
4
.vscode/settings.json
vendored
@@ -1,6 +1,8 @@
|
||||
{
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
|
||||
"workbench.colorTheme": "Default Dark+",
|
||||
"prettier.tabWidth": 4,
|
||||
"terminal.integrated.env.windows": {},
|
||||
"editor.formatOnSave": true,
|
||||
"[javascript]": {
|
||||
"editor.formatOnSave": true
|
||||
|
||||
163
CHANGELOG.md
163
CHANGELOG.md
@@ -1,5 +1,168 @@
|
||||
# All CHanges to LST can be found below.
|
||||
|
||||
## [2.11.0](https://git.tuffraid.net/cowch/lstV2/compare/v2.10.0...v2.11.0) (2025-03-30)
|
||||
|
||||
|
||||
### 🌟 Enhancements
|
||||
|
||||
* **auth:** admin user updates added ([a48d4bd](https://git.tuffraid.net/cowch/lstV2/commits/a48d4bd5afb53d9242a8ba13fef5dab2e2d0e4fe))
|
||||
* **lst:** script added for the test server to readd the permissions once it boots up ([3228ad7](https://git.tuffraid.net/cowch/lstV2/commits/3228ad789206a2fd477060fa6849850af52523b1))
|
||||
* **rfid:** work on the readers and there functions ([b5de644](https://git.tuffraid.net/cowch/lstV2/commits/b5de6445b382e7f7d7c8ce5893138a1a86a56eb2))
|
||||
* **updater:** added in a delete function for the server side ([0ced135](https://git.tuffraid.net/cowch/lstV2/commits/0ced135ec38590fd599e9a45542768b3790eb3db))
|
||||
|
||||
|
||||
### 📝 Chore
|
||||
|
||||
* **release:** bump build number to 100 ([c474536](https://git.tuffraid.net/cowch/lstV2/commits/c474536992659b7ca789a5841441f1634de7a6dc))
|
||||
* **release:** bump build number to 101 ([52171c8](https://git.tuffraid.net/cowch/lstV2/commits/52171c87fc4b5f15d964e6695dd4d9c2710ab961))
|
||||
* **release:** bump build number to 102 ([290f20b](https://git.tuffraid.net/cowch/lstV2/commits/290f20b86b81fa82458c0317215dd614f3de73c2))
|
||||
* **release:** bump build number to 103 ([1386e0f](https://git.tuffraid.net/cowch/lstV2/commits/1386e0f00f54a54f5b7645f2c1c23ebdd3228e59))
|
||||
* **release:** bump build number to 104 ([ca41069](https://git.tuffraid.net/cowch/lstV2/commits/ca4106945b2c09ee495821c8c2452b5254e4df37))
|
||||
* **release:** bump build number to 105 ([a01c056](https://git.tuffraid.net/cowch/lstV2/commits/a01c0566c2413e2ba68106e26eb9dc09c415ba92))
|
||||
* **release:** bump build number to 106 ([c32547c](https://git.tuffraid.net/cowch/lstV2/commits/c32547ceb831c510797da8c3d99745bd2a843a1b))
|
||||
* **release:** bump build number to 107 ([175c722](https://git.tuffraid.net/cowch/lstV2/commits/175c7226eda3cdd3d63febf6e058171c72d0cb5e))
|
||||
* **release:** bump build number to 108 ([7d47338](https://git.tuffraid.net/cowch/lstV2/commits/7d4733896e5f9578c60133cb5c871139e1ac332c))
|
||||
* **release:** bump build number to 109 ([d58cb52](https://git.tuffraid.net/cowch/lstV2/commits/d58cb5286e85f49e327134f2b957c56fdd1a319a))
|
||||
* **release:** bump build number to 110 ([d557728](https://git.tuffraid.net/cowch/lstV2/commits/d557728fa25c33a95ae99559ddc4ce7b2f7a0cad))
|
||||
* **release:** bump build number to 111 ([f3103d8](https://git.tuffraid.net/cowch/lstV2/commits/f3103d8c1a027d3364540b9044a298346bce1ead))
|
||||
* **release:** bump build number to 112 ([f0b9bd5](https://git.tuffraid.net/cowch/lstV2/commits/f0b9bd599a9da86b36dd952b8d3dc47116add380))
|
||||
* **release:** bump build number to 113 ([ff1dfdd](https://git.tuffraid.net/cowch/lstV2/commits/ff1dfdde68335907e8e24f0f011cc611d5de3b0c))
|
||||
* **release:** bump build number to 114 ([fb31ae7](https://git.tuffraid.net/cowch/lstV2/commits/fb31ae79d125c6b672608b56b02d9c7239250d19))
|
||||
* **release:** bump build number to 115 ([ac7859f](https://git.tuffraid.net/cowch/lstV2/commits/ac7859fda34606874a05f8665fe7478cf79d7037))
|
||||
* **release:** bump build number to 116 ([88258ba](https://git.tuffraid.net/cowch/lstV2/commits/88258baf9d71cba38f987851e62363563f28d6ae))
|
||||
* **release:** bump build number to 116 ([82acfcc](https://git.tuffraid.net/cowch/lstV2/commits/82acfcc4a9effbdf5fe8debd4617b3cfca01c0ea))
|
||||
* **release:** bump build number to 117 ([d046c4e](https://git.tuffraid.net/cowch/lstV2/commits/d046c4ea41d7b16f7fb109ae1ab728edd1dbcb97))
|
||||
* **release:** bump build number to 118 ([1f7c33d](https://git.tuffraid.net/cowch/lstV2/commits/1f7c33d871ce408440739bfad16165aa0dd2f982))
|
||||
* **release:** bump build number to 119 ([035eda9](https://git.tuffraid.net/cowch/lstV2/commits/035eda9aa8212443c1147d420fb30fdb09b3ff35))
|
||||
* **release:** bump build number to 120 ([b903c8e](https://git.tuffraid.net/cowch/lstV2/commits/b903c8ee2d9f5c0cb63404f232f235d2f9ddeb81))
|
||||
* **release:** bump build number to 121 ([af5dc92](https://git.tuffraid.net/cowch/lstV2/commits/af5dc9266f5a75c0065615f1526a2ee726b66a02))
|
||||
* **release:** bump build number to 122 ([28fbf2c](https://git.tuffraid.net/cowch/lstV2/commits/28fbf2c1e43996c50eca7d3b7145bd7dec31f1de))
|
||||
* **release:** bump build number to 123 ([09c0825](https://git.tuffraid.net/cowch/lstV2/commits/09c082519467c85240c8ed89123cb260c0011514))
|
||||
* **release:** bump build number to 124 ([098c477](https://git.tuffraid.net/cowch/lstV2/commits/098c477119c5accb0146f7bd125f80197bb44210))
|
||||
* **release:** bump build number to 125 ([a647d05](https://git.tuffraid.net/cowch/lstV2/commits/a647d05d3b8aa0ec3cd8fa1dc05fbed02476769a))
|
||||
* **release:** bump build number to 93 ([b4e0f4c](https://git.tuffraid.net/cowch/lstV2/commits/b4e0f4c66687d8957fa81076687d0504582812aa))
|
||||
* **release:** bump build number to 94 ([f39ae0f](https://git.tuffraid.net/cowch/lstV2/commits/f39ae0f590c17e4fc4b278296f86562c572bc6d2))
|
||||
* **release:** bump build number to 95 ([278c553](https://git.tuffraid.net/cowch/lstV2/commits/278c5538bcea651f06c11f5be9f4948e328261c1))
|
||||
* **release:** bump build number to 96 ([ca552d5](https://git.tuffraid.net/cowch/lstV2/commits/ca552d55878bc52e01186b09b6721af24faa445b))
|
||||
* **release:** bump build number to 97 ([c03b61f](https://git.tuffraid.net/cowch/lstV2/commits/c03b61f48a32cef3e03f3da8d0f70edbcecd0e59))
|
||||
* **release:** bump build number to 98 ([ea7801f](https://git.tuffraid.net/cowch/lstV2/commits/ea7801fccf7ecf176804fd10fac6db91321402d7))
|
||||
* **release:** bump build number to 99 ([600a989](https://git.tuffraid.net/cowch/lstV2/commits/600a98922630f0694971e110db2f8bf997571e36))
|
||||
|
||||
|
||||
### 🛠️ Code Refactor
|
||||
|
||||
* **format changes:** changes to the file formats to match across computers ([9784072](https://git.tuffraid.net/cowch/lstV2/commits/9784072aab3245d52b9a8d0d6a3a5a7716e61e0c))
|
||||
* **logger:** changed log level to be in the env file vs hardcoded ([f9f68ce](https://git.tuffraid.net/cowch/lstV2/commits/f9f68ce969e6d87a23c1be21fd9877d2dfa6f6cc))
|
||||
* **lst:** added huston backin ([11e5cf4](https://git.tuffraid.net/cowch/lstV2/commits/11e5cf4d865f93d65d870bff4bde96288c04db24))
|
||||
* **lst:** added in a removal of old files so we can keep the server clean ([c27ad7c](https://git.tuffraid.net/cowch/lstV2/commits/c27ad7cf6a5ea2ee636c1b25a66773b07a897a3e))
|
||||
* **ocme:** added new error in posting where we know when the pallet is not instock ([27d6b6e](https://git.tuffraid.net/cowch/lstV2/commits/27d6b6e88423ee201e30bbcc3747126c8567801c))
|
||||
* **ocp:** finished the dashboard and move logs and labels to a tab style ([7b1c6e1](https://git.tuffraid.net/cowch/lstV2/commits/7b1c6e1361fcc93729c250db29e828d6d07ca387))
|
||||
* **ocp:** lots of work for rfid and dyco contorl ([ba3d721](https://git.tuffraid.net/cowch/lstV2/commits/ba3d721940e800b61aeba6f3c81d9af40be01c9c))
|
||||
* **server:** removed console logs ([a5dee58](https://git.tuffraid.net/cowch/lstV2/commits/a5dee582236d3c1ab581b63fd463439a4d9e8176))
|
||||
|
||||
|
||||
### 🐛 Bug fixes
|
||||
|
||||
* **admin auth:** added in role change for v1 ([f9096b5](https://git.tuffraid.net/cowch/lstV2/commits/f9096b54f5902d19226e9e5728ffa4c64d8062f9))
|
||||
* **logger:** changes to get the most recent verse aged ([2c8d1fb](https://git.tuffraid.net/cowch/lstV2/commits/2c8d1fb71045dcd241e62b4ee2f1c03ae3690e5b))
|
||||
* **misc:** work on ocp to improve the errors that were missed and better logging ([63b1151](https://git.tuffraid.net/cowch/lstV2/commits/63b1151cb7e1d81b080c28dbec569c851fa1b48a))
|
||||
* **ocme:** fixed to make sure we can always just update a runnning nunmber ([e3ba45a](https://git.tuffraid.net/cowch/lstV2/commits/e3ba45ae13ddc21525c6113bf0f6dca2d2965637))
|
||||
|
||||
## [2.10.0](https://git.tuffraid.net/cowch/lstV2/compare/v2.9.0...v2.10.0) (2025-03-27)
|
||||
|
||||
|
||||
### 📝 Testing Code
|
||||
|
||||
* **ocme:** lots of changes to get it working in production ([6dd9ed8](https://git.tuffraid.net/cowch/lstV2/commits/6dd9ed848bc7d4e8a62778cfe36f89e077187157))
|
||||
|
||||
|
||||
### 📈 Project changes
|
||||
|
||||
* **lst:** changes made to the settings file to work across all pvs ([5149de3](https://git.tuffraid.net/cowch/lstV2/commits/5149de3199d3aaf349b8a4c99d5db83f8d04ae49))
|
||||
|
||||
|
||||
### 🐛 Bug fixes
|
||||
|
||||
* **logistics:** correction to the lane grab ([2d4b1db](https://git.tuffraid.net/cowch/lstV2/commits/2d4b1db5f4697770aee8829764bd85643893d3e8))
|
||||
* **lst:** missing , in versionRc ([c35726b](https://git.tuffraid.net/cowch/lstV2/commits/c35726bf5ccd6565abda37c6618a34e975e70d41))
|
||||
* **ocme:** corrections to posting data for the response was added ([dcb56d4](https://git.tuffraid.net/cowch/lstV2/commits/dcb56d427458c774b462e78daba6fee4afefd525))
|
||||
* **ocme:** fixed some import errors ([b01980e](https://git.tuffraid.net/cowch/lstV2/commits/b01980e1c5a8833b25ea557a2da07b74560526e3))
|
||||
* **ocme:** fixed the camera routes ([51267f5](https://git.tuffraid.net/cowch/lstV2/commits/51267f5202ceebe1c31c819395e1588d47657c38))
|
||||
* **servers:** fixed the weird conflict import that happened ([8a040d1](https://git.tuffraid.net/cowch/lstV2/commits/8a040d15dbf5de5fbb9949b7834c39b93b145aa7))
|
||||
|
||||
|
||||
### 📝 Chore
|
||||
|
||||
* bump build number to 50 ([9bdca33](https://git.tuffraid.net/cowch/lstV2/commits/9bdca3317c7c213f9c5853222eafe1ab028b5f64))
|
||||
* **release:** bump build number to 52 ([5886bea](https://git.tuffraid.net/cowch/lstV2/commits/5886bea85da30fe43635e05de1e9bc4f5789fa64))
|
||||
* **release:** bump build number to 55 ([0c5fc1d](https://git.tuffraid.net/cowch/lstV2/commits/0c5fc1dfb0a8bee5cf7414733a555fe1b5888b8e))
|
||||
* **release:** bump build number to 56 ([335ea2d](https://git.tuffraid.net/cowch/lstV2/commits/335ea2deca54dacda2117849104bf4c24faee3e8))
|
||||
* **release:** bump build number to 57 ([6843368](https://git.tuffraid.net/cowch/lstV2/commits/6843368c3682bb56e5ce4aafbb18367e96a6016e))
|
||||
* **release:** bump build number to 58 ([0475bb3](https://git.tuffraid.net/cowch/lstV2/commits/0475bb30f9d6d4e2d132b15b24d9ab225d8de3b9))
|
||||
* **release:** bump build number to 59 ([9395ec6](https://git.tuffraid.net/cowch/lstV2/commits/9395ec6cd4483f52fcca949a95a4ceecaa843f65))
|
||||
* **release:** bump build number to 60 ([57e82d2](https://git.tuffraid.net/cowch/lstV2/commits/57e82d23603622c301c7e6d636f9cec07d44e0b2))
|
||||
* **release:** bump build number to 61 ([5ed67f3](https://git.tuffraid.net/cowch/lstV2/commits/5ed67f3fc0f99ca5344da8b73fd005184b89670b))
|
||||
* **release:** bump build number to 62 ([c1cc355](https://git.tuffraid.net/cowch/lstV2/commits/c1cc355f4f4e74c3897cada64d961b28d24c07b2))
|
||||
* **release:** bump build number to 63 ([e1cad02](https://git.tuffraid.net/cowch/lstV2/commits/e1cad027d2714ddf3289e31b5c3bbb96306f1f56))
|
||||
* **release:** bump build number to 64 ([63177b5](https://git.tuffraid.net/cowch/lstV2/commits/63177b523e2dd1fabefe52f52dd3c6b3fcff9bcf))
|
||||
* **release:** bump build number to 65 ([3a4dc47](https://git.tuffraid.net/cowch/lstV2/commits/3a4dc47a368bb20f622b7e4647337c5e68150db9))
|
||||
* **release:** bump build number to 66 ([b3b5fce](https://git.tuffraid.net/cowch/lstV2/commits/b3b5fcec651e2bc585ecd4be03be4288867b214f))
|
||||
* **release:** bump build number to 67 ([cb01ef1](https://git.tuffraid.net/cowch/lstV2/commits/cb01ef1af17e9c83753e09a1528e6140a4104273))
|
||||
* **release:** bump build number to 68 ([4664768](https://git.tuffraid.net/cowch/lstV2/commits/46647687dc9938ecf6e72a63f15afc0e29bebcc4))
|
||||
* **release:** bump build number to 69 ([2c227f9](https://git.tuffraid.net/cowch/lstV2/commits/2c227f94287a6ce9c06b0a41772085ba1f4f0cd3))
|
||||
* **release:** bump build number to 70 ([96abef7](https://git.tuffraid.net/cowch/lstV2/commits/96abef762b77361c857642a33acfb69c0bc00666))
|
||||
* **release:** bump build number to 71 ([836f3e2](https://git.tuffraid.net/cowch/lstV2/commits/836f3e294b4d92673388023503e409592ef95ba3))
|
||||
* **release:** bump build number to 72 ([abe53b8](https://git.tuffraid.net/cowch/lstV2/commits/abe53b8f5d9bfbf517c7f56c5d4df2e4586aedbd))
|
||||
* **release:** bump build number to 73 ([5201012](https://git.tuffraid.net/cowch/lstV2/commits/5201012235181975cb89aee8dbc644ca4aa42210))
|
||||
* **release:** bump build number to 74 ([529e922](https://git.tuffraid.net/cowch/lstV2/commits/529e922485303251349c081ad8b2e9bee08dd420))
|
||||
* **release:** bump build number to 75 ([b8c028a](https://git.tuffraid.net/cowch/lstV2/commits/b8c028a6c1fa54afeb049fd42e666bc40690aa4e))
|
||||
* **release:** bump build number to 76 ([9254e52](https://git.tuffraid.net/cowch/lstV2/commits/9254e528086b95ada8c9dc4468f4fbb5b39fbd68))
|
||||
* **release:** bump build number to 77 ([4d4d6fc](https://git.tuffraid.net/cowch/lstV2/commits/4d4d6fc7a4885b4b1652ac125a2b39b8325b0d6e))
|
||||
* **release:** bump build number to 78 ([36cdc7b](https://git.tuffraid.net/cowch/lstV2/commits/36cdc7b4bf3bf90a785bab4d9892e65f84cb162a))
|
||||
* **release:** bump build number to 79 ([8d2721b](https://git.tuffraid.net/cowch/lstV2/commits/8d2721b0c2f6255affcd9ec08427e1b4e6771107))
|
||||
* **release:** bump build number to 80 ([cacfd6d](https://git.tuffraid.net/cowch/lstV2/commits/cacfd6d2e0e11ab7dbc5cb443a58df7bf8d2b8bb))
|
||||
* **release:** bump build number to 81 ([2509888](https://git.tuffraid.net/cowch/lstV2/commits/250988800e1429a1f46005ae54a2a07d31fac3a8))
|
||||
* **release:** bump build number to 82 ([c2aa69a](https://git.tuffraid.net/cowch/lstV2/commits/c2aa69ab0a2f925944abd1b78af6a8698249bff8))
|
||||
* **release:** bump build number to 83 ([7f946c0](https://git.tuffraid.net/cowch/lstV2/commits/7f946c095b8c0208c97bd1bb2c33cf466a04d125))
|
||||
* **release:** bump build number to 84 ([4162543](https://git.tuffraid.net/cowch/lstV2/commits/416254353cd0a926aaf14c343db2ad18f025b230))
|
||||
* **release:** bump build number to 85 ([5d61eb8](https://git.tuffraid.net/cowch/lstV2/commits/5d61eb879e102939df56928cd8d57eda561aabca))
|
||||
* **release:** bump build number to 86 ([5ab813f](https://git.tuffraid.net/cowch/lstV2/commits/5ab813f37894136549de0a05e5b4e2491220d16d))
|
||||
* **release:** bump build number to 87 ([509ef84](https://git.tuffraid.net/cowch/lstV2/commits/509ef8472688ba655c524a1068c3721559f5da11))
|
||||
* **release:** bump build number to 88 ([c0a0589](https://git.tuffraid.net/cowch/lstV2/commits/c0a0589b3c860ae8202a5dd230c18a3463cce857))
|
||||
* **release:** bump build number to 89 ([af076b8](https://git.tuffraid.net/cowch/lstV2/commits/af076b8e27b599c479e4e51f38487cf4cb3cfa34))
|
||||
* **release:** bump build number to 90 ([04aa943](https://git.tuffraid.net/cowch/lstV2/commits/04aa9439205f12bfe10b3fdf76a211a7b8178ac1))
|
||||
* **release:** bump build number to 91 ([a31e7ea](https://git.tuffraid.net/cowch/lstV2/commits/a31e7ea1634fd6b10e22dff4ba93157c2be711ac))
|
||||
* **release:** bump build number to 92 ([878e650](https://git.tuffraid.net/cowch/lstV2/commits/878e650e6237345d825632dff0387a89c7eee088))
|
||||
|
||||
|
||||
### 🌟 Enhancements
|
||||
|
||||
* **logistics:** added in return material by lane name and gets lane id ([58f7b43](https://git.tuffraid.net/cowch/lstV2/commits/58f7b4322d3e523620f827a580ff5534b0be5f6c))
|
||||
* **notify:** intial nofity system added to monitor crashes and rfid wrapper ([eb051d5](https://git.tuffraid.net/cowch/lstV2/commits/eb051d51f21b1ad617851fa3f4a1b8ba2f4fe4ac))
|
||||
* **ocme:** manual camera trigger for the wrapper added ([bc18211](https://git.tuffraid.net/cowch/lstV2/commits/bc1821132e30be6b3a36bae63ce52fd4007f74dd))
|
||||
* **ocp:** add lots with scroll view was added ([9e9a56c](https://git.tuffraid.net/cowch/lstV2/commits/9e9a56cbb15782770daf7e4ab08b31ad23df6c27))
|
||||
* **ocp:** added in printers get and add ([f90066c](https://git.tuffraid.net/cowch/lstV2/commits/f90066c09020ebac03a93059c8e41f8531812c8a))
|
||||
* **ocp:** added labeling logs in ([ceaa25f](https://git.tuffraid.net/cowch/lstV2/commits/ceaa25f31e6da526abd0350881e21984c66b455a))
|
||||
* **ocp:** added wrappercard into the mix ([3b5e82f](https://git.tuffraid.net/cowch/lstV2/commits/3b5e82fdc122824b4f59f00f2ed59b90813694ba))
|
||||
* **ocp:** create and book in plus dyco controller implemented ([7a1a477](https://git.tuffraid.net/cowch/lstV2/commits/7a1a4773e71cab93f36071530dbb5561e7592ec7))
|
||||
* **ocp:** prodlink check added ([f9cd3fb](https://git.tuffraid.net/cowch/lstV2/commits/f9cd3fb8815635fdd0736b573dec86d14b24a6a7))
|
||||
* **server:** clearn up code ([fe0c500](https://git.tuffraid.net/cowch/lstV2/commits/fe0c500dcfe317b3f67d67474fda7cf6872f3f37))
|
||||
|
||||
|
||||
### 🛠️ Code Refactor
|
||||
|
||||
* **frontend:** prettier change to formatting ([c71b514](https://git.tuffraid.net/cowch/lstV2/commits/c71b514d9add69c63e608b22bd8a936fa770b167))
|
||||
* **ocme:** clean up on the getInfo endpoint ([b9f1909](https://git.tuffraid.net/cowch/lstV2/commits/b9f19095cbd86569b58bec99575d924db997e385))
|
||||
* **ocme:** cleaup on pickedup ([73aa95a](https://git.tuffraid.net/cowch/lstV2/commits/73aa95a6937129a36f6ece10ef8d6fd5f01a2b27))
|
||||
* **ocme:** corrections to endpoints to work with ocnme as intneeded ([e6e1cec](https://git.tuffraid.net/cowch/lstV2/commits/e6e1cecce33b3c8cd94cf6372601c92f268b12a5))
|
||||
* **ocme:** removed some info logs as ocme calls alot ([96e7f74](https://git.tuffraid.net/cowch/lstV2/commits/96e7f742fe68cc98de3039bd3dbfb2d27f6d7204))
|
||||
* **ocp:** moved printers to there own folder to keep everything clean ([030f9f9](https://git.tuffraid.net/cowch/lstV2/commits/030f9f9aacdfcca1298a26be4442f5629626ba79))
|
||||
* **rfid:** refactored station 3 (lines) and complete logic ([bd11feb](https://git.tuffraid.net/cowch/lstV2/commits/bd11feb1365ffb058283eb9384684c199ef9fd21))
|
||||
* **rfid:** refactored the way the wrapper works to indlude backup dyco plan ([90e9bb0](https://git.tuffraid.net/cowch/lstV2/commits/90e9bb0ff6a2f598b055fae931a0d3c78f93e868))
|
||||
* **server:** changed to log only when in dev, and removed the redirect of the url ([adf0880](https://git.tuffraid.net/cowch/lstV2/commits/adf08806593fdcd3a3d9d0a6d07f0262501e21ad))
|
||||
* **tcpserver:** just the tcp server doing something ([04eb2e3](https://git.tuffraid.net/cowch/lstV2/commits/04eb2e3e145ba99b330ab627fcd9bae436e17fcf))
|
||||
* **updateserver:** removed ocme from starting back up as it was migrated ([2551d6c](https://git.tuffraid.net/cowch/lstV2/commits/2551d6c9074a0338224d81e690600a7a4b9c9777))
|
||||
|
||||
## [2.9.0](https://git.tuffraid.net/cowch/lstV2/compare/v2.8.0...v2.9.0) (2025-03-23)
|
||||
|
||||
|
||||
|
||||
17
database/migrations/0026_daily_the_twelve.sql
Normal file
17
database/migrations/0026_daily_the_twelve.sql
Normal file
@@ -0,0 +1,17 @@
|
||||
CREATE TABLE "printers" (
|
||||
"printer_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"humanReadableId" text,
|
||||
"name" text NOT NULL,
|
||||
"ipAddress" text,
|
||||
"port" numeric NOT NULL,
|
||||
"status" text,
|
||||
"statusText" text NOT NULL,
|
||||
"lastTimePrinted" text,
|
||||
"assigned" boolean DEFAULT false NOT NULL,
|
||||
"remark" text,
|
||||
"monitorState" boolean DEFAULT false NOT NULL,
|
||||
"add_Date" timestamp DEFAULT now(),
|
||||
"upd_date" timestamp DEFAULT now()
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "humanReadableId" ON "printers" USING btree ("name");
|
||||
3
database/migrations/0027_needy_sleepwalker.sql
Normal file
3
database/migrations/0027_needy_sleepwalker.sql
Normal file
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "printers" ALTER COLUMN "statusText" DROP NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "printers" ALTER COLUMN "assigned" DROP NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "printers" ALTER COLUMN "monitorState" DROP NOT NULL;
|
||||
1
database/migrations/0028_fast_wong.sql
Normal file
1
database/migrations/0028_fast_wong.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "printers" ALTER COLUMN "port" DROP NOT NULL;
|
||||
2
database/migrations/0029_giant_blue_blade.sql
Normal file
2
database/migrations/0029_giant_blue_blade.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
DROP INDEX "humanReadableId";--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "humanReadableId" ON "printers" USING btree ("humanReadableId");
|
||||
4
database/migrations/0030_conscious_cable.sql
Normal file
4
database/migrations/0030_conscious_cable.sql
Normal file
@@ -0,0 +1,4 @@
|
||||
CREATE TABLE "prodlabels" (
|
||||
"label_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"runningNr" integer NOT NULL
|
||||
);
|
||||
7
database/migrations/0031_loud_alex_power.sql
Normal file
7
database/migrations/0031_loud_alex_power.sql
Normal file
@@ -0,0 +1,7 @@
|
||||
ALTER TABLE "prodlabels" ADD COLUMN "printerID" integer;--> statement-breakpoint
|
||||
ALTER TABLE "prodlabels" ADD COLUMN "printerName" text;--> statement-breakpoint
|
||||
ALTER TABLE "prodlabels" ADD COLUMN "line" integer;--> statement-breakpoint
|
||||
ALTER TABLE "prodlabels" ADD COLUMN "status" text;--> statement-breakpoint
|
||||
ALTER TABLE "prodlabels" ADD COLUMN "add_date" timestamp;--> statement-breakpoint
|
||||
ALTER TABLE "prodlabels" ADD COLUMN "upd_date" timestamp;--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "runningNr" ON "prodlabels" USING btree ("runningNr");
|
||||
4
database/migrations/0032_tough_iron_monger.sql
Normal file
4
database/migrations/0032_tough_iron_monger.sql
Normal file
@@ -0,0 +1,4 @@
|
||||
ALTER TABLE "prodlabels" ALTER COLUMN "add_date" SET DEFAULT now();--> statement-breakpoint
|
||||
ALTER TABLE "prodlabels" ALTER COLUMN "upd_date" SET DEFAULT now();--> statement-breakpoint
|
||||
ALTER TABLE "prodlabels" ADD COLUMN "add_user" text DEFAULT 'lst';--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "ocme_runningNr" ON "ocmeData" USING btree ("runningNr");
|
||||
13
database/migrations/0033_flimsy_salo.sql
Normal file
13
database/migrations/0033_flimsy_salo.sql
Normal file
@@ -0,0 +1,13 @@
|
||||
CREATE TABLE "notifications" (
|
||||
"notify_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"description" text NOT NULL,
|
||||
"checkInterval" text DEFAULT '1',
|
||||
"timeType" text DEFAULT 'hour',
|
||||
"emails" text,
|
||||
"active" boolean DEFAULT false,
|
||||
"lastRan" timestamp DEFAULT now(),
|
||||
"notifiySettings" jsonb DEFAULT '{}'::jsonb
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "notify_name" ON "notifications" USING btree ("name");
|
||||
1168
database/migrations/meta/0026_snapshot.json
Normal file
1168
database/migrations/meta/0026_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1168
database/migrations/meta/0027_snapshot.json
Normal file
1168
database/migrations/meta/0027_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1168
database/migrations/meta/0028_snapshot.json
Normal file
1168
database/migrations/meta/0028_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1168
database/migrations/meta/0029_snapshot.json
Normal file
1168
database/migrations/meta/0029_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1194
database/migrations/meta/0030_snapshot.json
Normal file
1194
database/migrations/meta/0030_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1246
database/migrations/meta/0031_snapshot.json
Normal file
1246
database/migrations/meta/0031_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1271
database/migrations/meta/0032_snapshot.json
Normal file
1271
database/migrations/meta/0032_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1360
database/migrations/meta/0033_snapshot.json
Normal file
1360
database/migrations/meta/0033_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -183,6 +183,62 @@
|
||||
"when": 1742655504936,
|
||||
"tag": "0025_amusing_sugar_man",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 26,
|
||||
"version": "7",
|
||||
"when": 1742914066219,
|
||||
"tag": "0026_daily_the_twelve",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 27,
|
||||
"version": "7",
|
||||
"when": 1742917145140,
|
||||
"tag": "0027_needy_sleepwalker",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 28,
|
||||
"version": "7",
|
||||
"when": 1742917676211,
|
||||
"tag": "0028_fast_wong",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 29,
|
||||
"version": "7",
|
||||
"when": 1742917978318,
|
||||
"tag": "0029_giant_blue_blade",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 30,
|
||||
"version": "7",
|
||||
"when": 1742938986653,
|
||||
"tag": "0030_conscious_cable",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 31,
|
||||
"version": "7",
|
||||
"when": 1742939306614,
|
||||
"tag": "0031_loud_alex_power",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 32,
|
||||
"version": "7",
|
||||
"when": 1743124980863,
|
||||
"tag": "0032_tough_iron_monger",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 33,
|
||||
"version": "7",
|
||||
"when": 1743424730855,
|
||||
"tag": "0033_flimsy_salo",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
36
database/schema/notifications.ts
Normal file
36
database/schema/notifications.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import {
|
||||
boolean,
|
||||
jsonb,
|
||||
pgTable,
|
||||
text,
|
||||
timestamp,
|
||||
uniqueIndex,
|
||||
uuid,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { createSelectSchema } from "drizzle-zod";
|
||||
|
||||
export const notifications = pgTable(
|
||||
"notifications",
|
||||
{
|
||||
notify_id: uuid("notify_id").defaultRandom().primaryKey(),
|
||||
name: text("name").notNull(),
|
||||
description: text("description").notNull(),
|
||||
checkInterval: text("checkInterval").default("1"),
|
||||
timeType: text("timeType").default("hour"),
|
||||
emails: text("emails"),
|
||||
active: boolean("active").default(false),
|
||||
lastRan: timestamp("lastRan").defaultNow(),
|
||||
notifiySettings: jsonb("notifiySettings").default({}),
|
||||
},
|
||||
(table) => [
|
||||
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
|
||||
uniqueIndex("notify_name").on(table.name),
|
||||
]
|
||||
);
|
||||
|
||||
// Schema for inserting a user - can be used to validate API requests
|
||||
// export const insertRolesSchema = createInsertSchema(roles, {
|
||||
// name: z.string().min(3, {message: "Role name must be more than 3 letters"}),
|
||||
// });
|
||||
// Schema for selecting a Expenses - can be used to validate API responses
|
||||
export const selectNotificationsSchema = createSelectSchema(notifications);
|
||||
@@ -1,4 +1,13 @@
|
||||
import {text, pgTable, numeric, index, timestamp, boolean, uuid, uniqueIndex} from "drizzle-orm/pg-core";
|
||||
import {
|
||||
text,
|
||||
pgTable,
|
||||
numeric,
|
||||
index,
|
||||
timestamp,
|
||||
boolean,
|
||||
uuid,
|
||||
uniqueIndex,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
|
||||
import { z } from "zod";
|
||||
|
||||
@@ -20,6 +29,7 @@ export const ocmeData = pgTable(
|
||||
(table) => [
|
||||
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
|
||||
// uniqueIndex("role_name").on(table.name),
|
||||
uniqueIndex("ocme_runningNr").on(table.runningNr),
|
||||
]
|
||||
);
|
||||
|
||||
|
||||
42
database/schema/printers.ts
Normal file
42
database/schema/printers.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import {
|
||||
text,
|
||||
pgTable,
|
||||
numeric,
|
||||
index,
|
||||
timestamp,
|
||||
boolean,
|
||||
uuid,
|
||||
uniqueIndex,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
|
||||
import { z } from "zod";
|
||||
|
||||
export const printers = pgTable(
|
||||
"printers",
|
||||
{
|
||||
printer_id: uuid("printer_id").defaultRandom().primaryKey(),
|
||||
humanReadableId: text("humanReadableId"),
|
||||
name: text("name").notNull(),
|
||||
ipAddress: text("ipAddress"),
|
||||
port: numeric("port"),
|
||||
status: text("status"),
|
||||
statusText: text("statusText"),
|
||||
lastTimePrinted: text("lastTimePrinted"),
|
||||
assigned: boolean("assigned").default(false),
|
||||
remark: text("remark"),
|
||||
monitorState: boolean("monitorState").default(false),
|
||||
add_Date: timestamp("add_Date").defaultNow(),
|
||||
upd_date: timestamp("upd_date").defaultNow(),
|
||||
},
|
||||
(table) => [
|
||||
//uniqueIndex("emailUniqueIndex").on(sql`lower(${table.email})`),
|
||||
uniqueIndex("humanReadableId").on(table.humanReadableId),
|
||||
]
|
||||
);
|
||||
|
||||
// Schema for inserting a user - can be used to validate API requests
|
||||
// export const insertRolesSchema = createInsertSchema(roles, {
|
||||
// name: z.string().min(3, {message: "Role name must be more than 3 letters"}),
|
||||
// });
|
||||
// Schema for selecting a Expenses - can be used to validate API responses
|
||||
export const selectRolesSchema = createSelectSchema(printers);
|
||||
30
database/schema/prodLabels.ts
Normal file
30
database/schema/prodLabels.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import {
|
||||
integer,
|
||||
pgTable,
|
||||
uuid,
|
||||
uniqueIndex,
|
||||
text,
|
||||
timestamp,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { createSelectSchema } from "drizzle-zod";
|
||||
|
||||
export const prodlabels = pgTable(
|
||||
"prodlabels",
|
||||
{
|
||||
label_id: uuid("label_id").defaultRandom().primaryKey(),
|
||||
printerID: integer("printerID"),
|
||||
printerName: text("printerName"),
|
||||
line: integer("line"),
|
||||
runningNr: integer("runningNr").notNull(),
|
||||
status: text("status"),
|
||||
add_user: text("add_user").default("lst"),
|
||||
add_date: timestamp("add_date").defaultNow(),
|
||||
upd_date: timestamp("upd_date").defaultNow(),
|
||||
},
|
||||
(table) => [
|
||||
//uniqueIndex("emailUniqueIndex").on(sql`lower(${table.email})`),
|
||||
uniqueIndex("runningNr").on(table.runningNr),
|
||||
]
|
||||
);
|
||||
|
||||
export const prodlabelsSchema = createSelectSchema(prodlabels);
|
||||
@@ -1,4 +1,11 @@
|
||||
import {text, pgTable, timestamp, uuid, uniqueIndex, jsonb} from "drizzle-orm/pg-core";
|
||||
import {
|
||||
text,
|
||||
pgTable,
|
||||
timestamp,
|
||||
uuid,
|
||||
uniqueIndex,
|
||||
jsonb,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { createSelectSchema } from "drizzle-zod";
|
||||
import { z } from "zod";
|
||||
import { modules } from "./modules.js";
|
||||
|
||||
817
frontend/package-lock.json
generated
817
frontend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -8,11 +8,12 @@
|
||||
"build": "rimraf dist && tsc -b && vite build",
|
||||
"lint": "eslint .",
|
||||
"preview": "vite preview",
|
||||
"shad": "npx shadcn@canary add ",
|
||||
"shad": "npx shadcn@latest add ",
|
||||
"checkupdates": "npm-check-updates"
|
||||
},
|
||||
"dependencies": {
|
||||
"@hookform/resolvers": "^4.1.3",
|
||||
"@radix-ui/react-accordion": "^1.2.3",
|
||||
"@radix-ui/react-avatar": "^1.1.3",
|
||||
"@radix-ui/react-checkbox": "^1.1.4",
|
||||
"@radix-ui/react-collapsible": "^1.1.3",
|
||||
@@ -20,12 +21,14 @@
|
||||
"@radix-ui/react-dropdown-menu": "^2.1.6",
|
||||
"@radix-ui/react-label": "^2.1.2",
|
||||
"@radix-ui/react-popover": "^1.1.6",
|
||||
"@radix-ui/react-scroll-area": "^1.2.3",
|
||||
"@radix-ui/react-select": "^2.1.6",
|
||||
"@radix-ui/react-separator": "^1.1.2",
|
||||
"@radix-ui/react-slot": "^1.1.2",
|
||||
"@radix-ui/react-tabs": "^1.1.3",
|
||||
"@radix-ui/react-tooltip": "^1.1.8",
|
||||
"@tailwindcss/vite": "^4.0.15",
|
||||
"@tanstack/react-form": "^1.2.1",
|
||||
"@tanstack/react-query": "^5.69.0",
|
||||
"@tanstack/react-router": "^1.114.27",
|
||||
"@tanstack/react-table": "^8.21.2",
|
||||
@@ -45,6 +48,7 @@
|
||||
"react-dom": "^19.0.0",
|
||||
"react-grid-layout": "^1.5.1",
|
||||
"react-hook-form": "^7.54.2",
|
||||
"react-resizable-panels": "^2.1.7",
|
||||
"sonner": "^2.0.1",
|
||||
"tailwind-merge": "^3.0.2",
|
||||
"tailwindcss": "^4.0.15",
|
||||
|
||||
@@ -35,11 +35,14 @@ export default function ServerPage() {
|
||||
const { modules } = useModuleStore();
|
||||
const router = useRouter();
|
||||
|
||||
const { data, isError, error, isLoading } = useQuery(getServers(token ?? ""));
|
||||
const { data, isError, error, isLoading } = useQuery(
|
||||
getServers(token ?? "")
|
||||
);
|
||||
|
||||
const adminModule = modules.filter((n) => n.name === "admin");
|
||||
const userLevel =
|
||||
user?.roles?.filter((r) => r.module_id === adminModule[0].module_id) || [];
|
||||
user?.roles?.filter((r) => r.module_id === adminModule[0].module_id) ||
|
||||
[];
|
||||
|
||||
if (!adminModule[0]?.roles?.includes(userLevel[0]?.role)) {
|
||||
router.navigate({ to: "/" });
|
||||
@@ -49,7 +52,7 @@ export default function ServerPage() {
|
||||
return <div>{JSON.stringify(error)}</div>;
|
||||
}
|
||||
|
||||
console.log(data);
|
||||
//console.log(data);
|
||||
return (
|
||||
<LstCard className="m-2 flex place-content-center w-dvh">
|
||||
<Table>
|
||||
@@ -89,11 +92,19 @@ export default function ServerPage() {
|
||||
) : (
|
||||
<TableBody>
|
||||
{data?.map((server: Servers) => {
|
||||
const strippedDate = server.lastUpdated.replace("Z", ""); // Remove Z
|
||||
const formattedDate = format(strippedDate, "MM/dd/yyyy hh:mm a");
|
||||
const strippedDate = server.lastUpdated.replace(
|
||||
"Z",
|
||||
""
|
||||
); // Remove Z
|
||||
const formattedDate = format(
|
||||
strippedDate,
|
||||
"MM/dd/yyyy hh:mm a"
|
||||
);
|
||||
return (
|
||||
<TableRow key={server.server_id}>
|
||||
<TableCell className="font-medium">{server.sName}</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{server.sName}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{server.serverDNS}
|
||||
</TableCell>
|
||||
@@ -103,13 +114,20 @@ export default function ServerPage() {
|
||||
<TableCell className="font-medium">
|
||||
{server.idAddress}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">{formattedDate}</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{formattedDate}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{adminUrlCheck() && (
|
||||
<div className="flex flex-row">
|
||||
<UpdateServer server={server} token={token as string} />
|
||||
<UpdateServer
|
||||
server={server}
|
||||
token={token as string}
|
||||
/>
|
||||
<StartServer />
|
||||
<StopServer />
|
||||
<StopServer
|
||||
plantData={server}
|
||||
/>
|
||||
<RestartServer />
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -1,14 +1,46 @@
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {Tooltip, TooltipContent, TooltipProvider, TooltipTrigger} from "@/components/ui/tooltip";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
import axios from "axios";
|
||||
import { Octagon } from "lucide-react";
|
||||
|
||||
export default function StopServer() {
|
||||
export default function StopServer(plantData: any) {
|
||||
const token = localStorage.getItem("auth_token");
|
||||
const handleStopServer = async (plant: string) => {
|
||||
let data: any = {
|
||||
processType: "stop",
|
||||
plantToken: plant,
|
||||
};
|
||||
const url: string = window.location.host.split(":")[0];
|
||||
if (url === "localhost") {
|
||||
data = { ...data, remote: "true" };
|
||||
}
|
||||
try {
|
||||
const res = await axios.post("/api/server/serviceprocess", data, {
|
||||
headers: { Authorization: `Bearer ${token}` },
|
||||
});
|
||||
|
||||
console.log(res);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
};
|
||||
return (
|
||||
<div>
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Button variant={"outline"} size={"icon"}>
|
||||
<Button
|
||||
variant="destructive"
|
||||
size={"icon"}
|
||||
onClick={() =>
|
||||
handleStopServer(plantData.plantToken)
|
||||
}
|
||||
>
|
||||
<Octagon />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
|
||||
@@ -5,9 +5,20 @@ import {Servers} from "./ServerPage";
|
||||
import { useQuery } from "@tanstack/react-query";
|
||||
import { getSettings } from "@/utils/querys/settings";
|
||||
import axios from "axios";
|
||||
import {Tooltip, TooltipContent, TooltipProvider, TooltipTrigger} from "@/components/ui/tooltip";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
|
||||
export default function UpdateServer({server, token}: {server: Servers; token: string}) {
|
||||
export default function UpdateServer({
|
||||
server,
|
||||
token,
|
||||
}: {
|
||||
server: Servers;
|
||||
token: string;
|
||||
}) {
|
||||
const { data } = useQuery(getSettings(token ?? ""));
|
||||
const upgrade = async () => {
|
||||
let devDir = data.filter((n: any) => n.name === "devDir");
|
||||
@@ -29,7 +40,9 @@ export default function UpdateServer({server, token}: {server: Servers; token: s
|
||||
toast.success(result.data.message);
|
||||
}
|
||||
} catch (error: any) {
|
||||
toast.error(`There was an error updating the server: ${error.data.message}`);
|
||||
toast.error(
|
||||
`There was an error updating the server: ${error.data.message}`
|
||||
);
|
||||
}
|
||||
};
|
||||
return (
|
||||
@@ -37,7 +50,12 @@ export default function UpdateServer({server, token}: {server: Servers; token: s
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Button variant={"outline"} size={"icon"} onClick={upgrade} disabled={server.isUpgrading}>
|
||||
<Button
|
||||
variant={`${server.isUpgrading ? "ghost" : "outline"}`}
|
||||
size={"icon"}
|
||||
onClick={upgrade}
|
||||
disabled={server.isUpgrading}
|
||||
>
|
||||
<CircleFadingArrowUp />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
|
||||
43
frontend/src/components/admin/user/UserPage.tsx
Normal file
43
frontend/src/components/admin/user/UserPage.tsx
Normal file
@@ -0,0 +1,43 @@
|
||||
import {
|
||||
Accordion,
|
||||
AccordionContent,
|
||||
AccordionItem,
|
||||
AccordionTrigger,
|
||||
} from "@/components/ui/accordion";
|
||||
import { getUsers } from "@/utils/querys/admin/users";
|
||||
import { useQuery } from "@tanstack/react-query";
|
||||
import UserCard from "./components/UserCard";
|
||||
|
||||
export default function UserPage() {
|
||||
const { data, isError, error, isLoading } = useQuery(getUsers());
|
||||
|
||||
if (isLoading) return <div className="m-auto">Loading users...</div>;
|
||||
|
||||
if (isError)
|
||||
return (
|
||||
<div className="m-auto">
|
||||
There was an error getting the users.... {JSON.stringify(error)}
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="m-2 w-dvw">
|
||||
<Accordion type="single" collapsible>
|
||||
{data.map((u: any) => {
|
||||
return (
|
||||
<AccordionItem key={u.user_id} value={u.user_id}>
|
||||
<AccordionTrigger>
|
||||
<span>{u.username}</span>
|
||||
</AccordionTrigger>
|
||||
<AccordionContent>
|
||||
<div>
|
||||
<UserCard user={u} />
|
||||
</div>
|
||||
</AccordionContent>
|
||||
</AccordionItem>
|
||||
);
|
||||
})}
|
||||
</Accordion>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
238
frontend/src/components/admin/user/components/UserCard.tsx
Normal file
238
frontend/src/components/admin/user/components/UserCard.tsx
Normal file
@@ -0,0 +1,238 @@
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectGroup,
|
||||
SelectItem,
|
||||
SelectLabel,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
import { DebugButton } from "@/utils/formStuff/debugButton";
|
||||
import { userFormOptions } from "@/utils/formStuff/options/userformOptions";
|
||||
import { generatePassword } from "@/utils/passwordGen";
|
||||
import { getUsers } from "@/utils/querys/admin/users";
|
||||
import { useForm } from "@tanstack/react-form";
|
||||
import { useQuery } from "@tanstack/react-query";
|
||||
import axios from "axios";
|
||||
import { toast } from "sonner";
|
||||
|
||||
export default function UserCard(data: any) {
|
||||
const token = localStorage.getItem("auth_token");
|
||||
const { refetch } = useQuery(getUsers());
|
||||
const form = useForm({
|
||||
...userFormOptions(data.user),
|
||||
onSubmit: async ({ value }) => {
|
||||
// Do something with form data
|
||||
|
||||
const userData = { ...value, user_id: data.user.user_id };
|
||||
|
||||
try {
|
||||
const res = await axios.patch(
|
||||
"/api/auth/updateuser",
|
||||
userData,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
if (res.data.success) {
|
||||
toast.success(res.data.message);
|
||||
refetch();
|
||||
form.reset();
|
||||
} else {
|
||||
res.data.message;
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
},
|
||||
});
|
||||
return (
|
||||
<div>
|
||||
<form
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
}}
|
||||
>
|
||||
<form.Field
|
||||
name="username"
|
||||
validators={{
|
||||
// We can choose between form-wide and field-specific validators
|
||||
onChange: ({ value }) =>
|
||||
value.length > 3
|
||||
? undefined
|
||||
: "Username must be longer than 3 letters",
|
||||
}}
|
||||
children={(field) => {
|
||||
return (
|
||||
<div className="m-2 min-w-48 max-w-96 p-2">
|
||||
<Label htmlFor="username">Username</Label>
|
||||
<Input
|
||||
name={field.name}
|
||||
value={field.state.value}
|
||||
onBlur={field.handleBlur}
|
||||
//type="number"
|
||||
onChange={(e) =>
|
||||
field.handleChange(e.target.value)
|
||||
}
|
||||
/>
|
||||
{field.state.meta.errors.length ? (
|
||||
<em>{field.state.meta.errors.join(",")}</em>
|
||||
) : null}
|
||||
</div>
|
||||
);
|
||||
}}
|
||||
/>
|
||||
<form.Field
|
||||
name="email"
|
||||
validators={{
|
||||
// We can choose between form-wide and field-specific validators
|
||||
onChange: ({ value }) =>
|
||||
value.length > 3
|
||||
? undefined
|
||||
: "You must enter a correct ",
|
||||
}}
|
||||
children={(field) => {
|
||||
return (
|
||||
<div className="m-2 min-w-48 max-w-96 p-2">
|
||||
<Label htmlFor="email">Email</Label>
|
||||
<Input
|
||||
name={field.name}
|
||||
value={field.state.value}
|
||||
onBlur={field.handleBlur}
|
||||
//type="number"
|
||||
onChange={(e) =>
|
||||
field.handleChange(e.target.value)
|
||||
}
|
||||
/>
|
||||
{field.state.meta.errors.length ? (
|
||||
<em>{field.state.meta.errors.join(",")}</em>
|
||||
) : null}
|
||||
</div>
|
||||
);
|
||||
}}
|
||||
/>
|
||||
<form.Field
|
||||
name="role"
|
||||
//listeners={{onChange: ({value})=>{}}}
|
||||
children={(field) => {
|
||||
return (
|
||||
<div className="m-2 min-w-48 max-w-96 p-2">
|
||||
<Label htmlFor={field.name}>Select role</Label>
|
||||
<Select
|
||||
value={field.state.value}
|
||||
onValueChange={field.handleChange}
|
||||
>
|
||||
<SelectTrigger className="w-[180px]">
|
||||
<SelectValue
|
||||
id={field.name}
|
||||
placeholder="Select Role"
|
||||
/>
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectGroup>
|
||||
<SelectLabel>Roles</SelectLabel>
|
||||
<SelectItem value="viewer">
|
||||
Viewer
|
||||
</SelectItem>
|
||||
<SelectItem value="operator">
|
||||
Operator
|
||||
</SelectItem>
|
||||
<SelectItem value="manager">
|
||||
Manager
|
||||
</SelectItem>
|
||||
<SelectItem value="admin">
|
||||
Admin
|
||||
</SelectItem>
|
||||
</SelectGroup>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
);
|
||||
}}
|
||||
/>
|
||||
<form.Field
|
||||
name="password"
|
||||
validators={{
|
||||
onChangeAsyncDebounceMs: 500,
|
||||
onChangeAsync: ({ value }) => {
|
||||
if (
|
||||
window.location.pathname.includes("/users") &&
|
||||
value.length === 0
|
||||
) {
|
||||
return;
|
||||
}
|
||||
if (value.length < 4) {
|
||||
return "Password must be at least 4 characters long.";
|
||||
}
|
||||
|
||||
if (!/[A-Z]/.test(value)) {
|
||||
return "Password must contain at least one uppercase letter.";
|
||||
}
|
||||
|
||||
if (!/[a-z]/.test(value)) {
|
||||
return "Password must contain at least one lower case letter.";
|
||||
}
|
||||
|
||||
if (!/[0-9]/.test(value)) {
|
||||
return "Password must contain at least one number.";
|
||||
}
|
||||
|
||||
if (
|
||||
!/[!@#$%^&*()_+\-=\[\]{};':"\\|,.<>\/?]/.test(
|
||||
value
|
||||
)
|
||||
) {
|
||||
return "Password must contain at least one special character.";
|
||||
}
|
||||
},
|
||||
}}
|
||||
children={(field) => {
|
||||
return (
|
||||
<div className="m-2 p-2">
|
||||
<Label htmlFor="password">
|
||||
Change Password
|
||||
</Label>
|
||||
<div className="mt-2 flex flex-row">
|
||||
<Input
|
||||
className="min-w-48 max-w-96"
|
||||
name={field.name}
|
||||
value={field.state.value}
|
||||
onBlur={field.handleBlur}
|
||||
//type="number"
|
||||
onChange={(e) =>
|
||||
field.handleChange(e.target.value)
|
||||
}
|
||||
/>
|
||||
<Button
|
||||
className="ml-2"
|
||||
onClick={() =>
|
||||
field.handleChange(
|
||||
generatePassword(8)
|
||||
)
|
||||
}
|
||||
>
|
||||
Random password
|
||||
</Button>
|
||||
<DebugButton data={form.state.values} />
|
||||
</div>
|
||||
{field.state.meta.errors.length ? (
|
||||
<em>{field.state.meta.errors.join(",")}</em>
|
||||
) : null}
|
||||
</div>
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</form>
|
||||
<div>
|
||||
<Button onClick={form.handleSubmit}>Save</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,4 +1,14 @@
|
||||
import {Atom, Logs, Minus, Plus, Server, Settings, ShieldCheck, Users, Webhook} from "lucide-react";
|
||||
import {
|
||||
Atom,
|
||||
Logs,
|
||||
Minus,
|
||||
Plus,
|
||||
Server,
|
||||
Settings,
|
||||
ShieldCheck,
|
||||
Users,
|
||||
Webhook,
|
||||
} from "lucide-react";
|
||||
import {
|
||||
SidebarGroup,
|
||||
SidebarGroupContent,
|
||||
@@ -10,7 +20,11 @@ import {
|
||||
SidebarMenuSubButton,
|
||||
SidebarMenuSubItem,
|
||||
} from "../../ui/sidebar";
|
||||
import {Collapsible, CollapsibleContent, CollapsibleTrigger} from "../../ui/collapsible";
|
||||
import {
|
||||
Collapsible,
|
||||
CollapsibleContent,
|
||||
CollapsibleTrigger,
|
||||
} from "../../ui/collapsible";
|
||||
|
||||
const items = [
|
||||
{
|
||||
@@ -53,9 +67,9 @@ const data = {
|
||||
},
|
||||
{
|
||||
title: "Users",
|
||||
url: "#",
|
||||
url: "/users",
|
||||
icon: Users,
|
||||
isActive: false,
|
||||
isActive: true,
|
||||
},
|
||||
{
|
||||
title: "UCD",
|
||||
@@ -82,7 +96,11 @@ export function AdminSideBar() {
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
{data.navMain.map((item, index) => (
|
||||
<Collapsible key={item.title} defaultOpen={index === 1} className="group/collapsible">
|
||||
<Collapsible
|
||||
key={item.title}
|
||||
defaultOpen={index === 1}
|
||||
className="group/collapsible"
|
||||
>
|
||||
<SidebarMenuItem>
|
||||
<CollapsibleTrigger asChild>
|
||||
<SidebarMenuButton>
|
||||
@@ -96,15 +114,25 @@ export function AdminSideBar() {
|
||||
<CollapsibleContent>
|
||||
<SidebarMenuSub>
|
||||
{item.items.map((item) => (
|
||||
<SidebarMenuSubItem key={item.title}>
|
||||
<SidebarMenuSubItem
|
||||
key={item.title}
|
||||
>
|
||||
{item.isActive && (
|
||||
<SidebarMenuSubButton asChild>
|
||||
<SidebarMenuSubButton
|
||||
asChild
|
||||
>
|
||||
<a
|
||||
href={item.url}
|
||||
target={item.newWindow ? "_blank" : "_self"}
|
||||
target={
|
||||
item.newWindow
|
||||
? "_blank"
|
||||
: "_self"
|
||||
}
|
||||
>
|
||||
<item.icon />
|
||||
<span>{item.title}</span>
|
||||
<span>
|
||||
{item.title}
|
||||
</span>
|
||||
</a>
|
||||
</SidebarMenuSubButton>
|
||||
)}
|
||||
|
||||
131
frontend/src/components/ocme/WrapperCard.tsx
Normal file
131
frontend/src/components/ocme/WrapperCard.tsx
Normal file
@@ -0,0 +1,131 @@
|
||||
import axios from "axios";
|
||||
import { LstCard } from "../extendedUI/LstCard";
|
||||
import { Button } from "../ui/button";
|
||||
import { ScrollArea } from "../ui/scroll-area";
|
||||
import { Skeleton } from "../ui/skeleton";
|
||||
import {
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from "../ui/table";
|
||||
import { toast } from "sonner";
|
||||
|
||||
const currentPallets = [
|
||||
{ key: "line", label: "Line" },
|
||||
{ key: "runningNr", label: "Running #" },
|
||||
{ key: "upd_date", label: "Date Scanned" },
|
||||
{ key: "waitingfor", label: "Waiting For" },
|
||||
{ key: "clear", label: "Clear" },
|
||||
];
|
||||
|
||||
const currentTags = [
|
||||
{ key: "line", label: "Line" },
|
||||
{ key: "printerName", label: "Printer" },
|
||||
{ key: "runningNr", label: "Running #" },
|
||||
{ key: "upd_date", label: "Label date" },
|
||||
{ key: "status", label: "Label Status" },
|
||||
];
|
||||
export default function WrapperManualTrigger() {
|
||||
const cameraTrigger = async () => {
|
||||
try {
|
||||
const res = await axios.get("/ocme/api/v1/manualCameraTrigger");
|
||||
|
||||
if (res.data.success) {
|
||||
toast.success(res.data.message);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!res.data.success) {
|
||||
toast.error(res.data.message);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
//stoast.success(error.data.message);
|
||||
}
|
||||
};
|
||||
return (
|
||||
<LstCard className="m-2 p-2">
|
||||
<ScrollArea className="max-h-[200px]">
|
||||
<span>Wrapper Pallet Info</span>
|
||||
|
||||
<Table>
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
{currentPallets.map((l) => (
|
||||
<TableHead key={l.key}>{l.label}</TableHead>
|
||||
))}
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
{Array(3)
|
||||
.fill(0)
|
||||
.map((_, i) => (
|
||||
<TableRow key={i}>
|
||||
<TableCell className="font-medium">
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</Table>
|
||||
</ScrollArea>
|
||||
<ScrollArea className="max-h-[200px]">
|
||||
<Table>
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
{currentTags.map((l) => (
|
||||
<TableHead key={l.key}>{l.label}</TableHead>
|
||||
))}
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
{Array(3)
|
||||
.fill(0)
|
||||
.map((_, i) => (
|
||||
<TableRow key={i}>
|
||||
<TableCell className="font-medium">
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</Table>
|
||||
</ScrollArea>
|
||||
<div>
|
||||
<hr />
|
||||
<p className="text-center mb-3">Manual Triggers</p>
|
||||
<div className="flex flex-row justify-between">
|
||||
<Button onClick={cameraTrigger}>Camera</Button>
|
||||
<Button>Rfid</Button>
|
||||
</div>
|
||||
</div>
|
||||
</LstCard>
|
||||
);
|
||||
}
|
||||
@@ -2,10 +2,23 @@ import {toast} from "sonner";
|
||||
import { LstCard } from "../extendedUI/LstCard";
|
||||
import { Button } from "../ui/button";
|
||||
import { Input } from "../ui/input";
|
||||
import {Table, TableBody, TableCell, TableHead, TableHeader, TableRow} from "../ui/table";
|
||||
import {
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from "../ui/table";
|
||||
import { Skeleton } from "../ui/skeleton";
|
||||
//import CycleCountLog from "./CycleCountLog";
|
||||
import {Select, SelectContent, SelectItem, SelectTrigger, SelectValue} from "../ui/select";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "../ui/select";
|
||||
import { Controller, useForm } from "react-hook-form";
|
||||
import axios from "axios";
|
||||
import { useState } from "react";
|
||||
@@ -28,13 +41,22 @@ export default function OcmeCycleCount() {
|
||||
setCounting(true);
|
||||
toast.success(`Cycle count started`);
|
||||
try {
|
||||
const res = await axios.post("/ocme/api/v1/cyclecount", data, {
|
||||
const res = await axios.post("/ocme/api/v1/cycleCount", data, {
|
||||
headers: { Authorization: `Bearer ${token}` },
|
||||
});
|
||||
|
||||
if (res.data.success) {
|
||||
toast.success(res.data.message);
|
||||
setData(res.data.data);
|
||||
setCounting(false);
|
||||
reset();
|
||||
}
|
||||
|
||||
if (res.data.success) {
|
||||
toast.success(res.data.message);
|
||||
|
||||
setCounting(false);
|
||||
}
|
||||
} catch (error) {
|
||||
toast.error("There was an error cycle counting");
|
||||
setCounting(false);
|
||||
@@ -45,20 +67,25 @@ export default function OcmeCycleCount() {
|
||||
<div className="flex flex-row w-screen">
|
||||
<div className="m-2 w-5/6">
|
||||
<LstCard>
|
||||
<p className="ml-2">Please enter the name or laneID you want to cycle count.</p>
|
||||
<p className="ml-2">
|
||||
Please enter the name or laneID you want to cycle count.
|
||||
</p>
|
||||
<div>
|
||||
<form onSubmit={handleSubmit(onSubmit)}>
|
||||
<div className="flex justify-between">
|
||||
<div className="m-2 flex flex-row">
|
||||
<Input
|
||||
placeholder="enter lane: L064"
|
||||
className={errors.lane ? "border-red-500" : ""}
|
||||
className={
|
||||
errors.lane ? "border-red-500" : ""
|
||||
}
|
||||
aria-invalid={!!errors.lane}
|
||||
{...register("lane", {
|
||||
required: true,
|
||||
minLength: {
|
||||
value: 3,
|
||||
message: "The lane is too short!",
|
||||
message:
|
||||
"The lane is too short!",
|
||||
},
|
||||
})}
|
||||
/>
|
||||
@@ -72,21 +99,35 @@ export default function OcmeCycleCount() {
|
||||
fieldState: {},
|
||||
//formState,
|
||||
}) => (
|
||||
<Select onValueChange={onChange}>
|
||||
<Select
|
||||
onValueChange={onChange}
|
||||
>
|
||||
<SelectTrigger className="w-[180px]">
|
||||
<SelectValue placeholder="Select name or id" />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectItem value="name">Name</SelectItem>
|
||||
<SelectItem value="laneId">Lane ID</SelectItem>
|
||||
<SelectItem value="name">
|
||||
Name
|
||||
</SelectItem>
|
||||
<SelectItem value="laneId">
|
||||
Lane ID
|
||||
</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<Button className="m-2" type="submit" disabled={counting}>
|
||||
{counting ? <span>Counting...</span> : <span>CycleCount</span>}
|
||||
<Button
|
||||
className="m-2"
|
||||
type="submit"
|
||||
disabled={counting}
|
||||
>
|
||||
{counting ? (
|
||||
<span>Counting...</span>
|
||||
) : (
|
||||
<span>CycleCount</span>
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
</form>
|
||||
@@ -105,7 +146,7 @@ export default function OcmeCycleCount() {
|
||||
<TableHead>Result</TableHead>
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
{data.length === 0 ? (
|
||||
{data?.length === 0 ? (
|
||||
<TableBody>
|
||||
{Array(10)
|
||||
.fill(0)
|
||||
@@ -142,7 +183,9 @@ export default function OcmeCycleCount() {
|
||||
<>
|
||||
{data.map((i: any) => {
|
||||
let classname = ``;
|
||||
if (i.info === "Quality Check Required") {
|
||||
if (
|
||||
i.info === "Quality Check Required"
|
||||
) {
|
||||
classname = `bg-red-500`;
|
||||
}
|
||||
if (i.info === "Sent to Inv") {
|
||||
@@ -150,24 +193,46 @@ export default function OcmeCycleCount() {
|
||||
}
|
||||
return (
|
||||
<TableRow key={i.runningNumber}>
|
||||
<TableCell className={`font-medium ${classname}`}>
|
||||
<TableCell
|
||||
className={`font-medium ${classname}`}
|
||||
>
|
||||
{i.alpla_laneID}
|
||||
</TableCell>
|
||||
<TableCell className={`font-medium ${classname}`}>
|
||||
<TableCell
|
||||
className={`font-medium ${classname}`}
|
||||
>
|
||||
{i.alpla_laneDescription}
|
||||
</TableCell>
|
||||
<TableCell className={`font-medium ${classname}`}>
|
||||
<TableCell
|
||||
className={`font-medium ${classname}`}
|
||||
>
|
||||
{i.Article}
|
||||
</TableCell>
|
||||
<TableCell className={`font-medium ${classname}`}>
|
||||
<TableCell
|
||||
className={`font-medium ${classname}`}
|
||||
>
|
||||
{i.alpla_laneDescription}
|
||||
</TableCell>
|
||||
<TableCell className={`font-medium ${classname}`}>
|
||||
<TableCell
|
||||
className={`font-medium ${classname}`}
|
||||
>
|
||||
{i.runningNumber}
|
||||
</TableCell>
|
||||
<TableCell className={`font-medium ${classname}`}>{i.ocme}</TableCell>
|
||||
<TableCell className={`font-medium ${classname}`}>{i.stock}</TableCell>
|
||||
<TableCell className={`font-medium ${classname}`}>{i.info}</TableCell>
|
||||
<TableCell
|
||||
className={`font-medium ${classname}`}
|
||||
>
|
||||
{i.ocme}
|
||||
</TableCell>
|
||||
<TableCell
|
||||
className={`font-medium ${classname}`}
|
||||
>
|
||||
{i.stock}
|
||||
</TableCell>
|
||||
<TableCell
|
||||
className={`font-medium ${classname}`}
|
||||
>
|
||||
{i.info}
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
);
|
||||
})}
|
||||
|
||||
@@ -1,7 +1,14 @@
|
||||
import { LstCard } from "@/components/extendedUI/LstCard";
|
||||
|
||||
import { Skeleton } from "@/components/ui/skeleton";
|
||||
import {Table, TableBody, TableCell, TableHead, TableHeader, TableRow} from "@/components/ui/table";
|
||||
import {
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from "@/components/ui/table";
|
||||
// import {useSessionStore} from "@/lib/store/sessionStore";
|
||||
// import {useSettingStore} from "@/lib/store/useSettings";
|
||||
import { useQuery } from "@tanstack/react-query";
|
||||
@@ -66,7 +73,7 @@ export default function LabelLog() {
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const labelData = data ? data : [];
|
||||
return (
|
||||
<LstCard className="m-2 p-2 min-h-2/5">
|
||||
<p className="text-center">Labels for the last 2 hours</p>
|
||||
@@ -106,15 +113,26 @@ export default function LabelLog() {
|
||||
</>
|
||||
) : (
|
||||
<TableBody>
|
||||
{data?.map((label: any) => (
|
||||
{labelData.map((label: any) => (
|
||||
<TableRow key={label.runningNr}>
|
||||
<TableCell className="font-medium">{label.line}</TableCell>
|
||||
<TableCell className="font-medium">{label.printerName}</TableCell>
|
||||
<TableCell className="font-medium">{label.runningNr}</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{format(label.upd_date, "M/d/yyyy hh:mm")}
|
||||
{label.line}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{label.printerName}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{label.runningNr}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{format(
|
||||
label?.upd_date.replace("Z", ""),
|
||||
"M/d/yyyy hh:mm"
|
||||
)}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{label.status}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">{label.status}</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
|
||||
@@ -1,7 +1,14 @@
|
||||
import { LstCard } from "@/components/extendedUI/LstCard";
|
||||
|
||||
import { Skeleton } from "@/components/ui/skeleton";
|
||||
import {Table, TableBody, TableCell, TableHead, TableHeader, TableRow} from "@/components/ui/table";
|
||||
import {
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from "@/components/ui/table";
|
||||
import { useSessionStore } from "@/lib/store/sessionStore";
|
||||
import { useSettingStore } from "@/lib/store/useSettings";
|
||||
import { LotType } from "@/types/lots";
|
||||
@@ -9,6 +16,7 @@ import {getlots} from "@/utils/querys/production/lots";
|
||||
import { useQuery } from "@tanstack/react-query";
|
||||
import ManualPrint from "./ManualPrinting/ManualPrint";
|
||||
import ManualPrintForm from "./ManualPrinting/ManualPrintForm";
|
||||
import { ScrollArea } from "@/components/ui/scroll-area";
|
||||
|
||||
let lotColumns = [
|
||||
{
|
||||
@@ -62,8 +70,6 @@ export default function Lots() {
|
||||
const { settings } = useSettingStore();
|
||||
const server = settings.filter((n) => n.name === "server")[0]?.value || "";
|
||||
|
||||
console.log(server);
|
||||
|
||||
const roles = ["admin", "manager", "operator"];
|
||||
|
||||
if (user && roles.includes(user.role)) {
|
||||
@@ -83,13 +89,16 @@ export default function Lots() {
|
||||
if (isError) {
|
||||
return (
|
||||
<div className="m-2 p-2 min-h-2/5">
|
||||
<ScrollArea className="max-h-1/2 rounded-md border p-4">
|
||||
<LstCard>
|
||||
<p className="text-center">Current Assigned lots</p>
|
||||
<Table>
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
{lotColumns.map((l) => (
|
||||
<TableHead key={l.key}>{l.label}</TableHead>
|
||||
<TableHead key={l.key}>
|
||||
{l.label}
|
||||
</TableHead>
|
||||
))}
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
@@ -131,12 +140,14 @@ export default function Lots() {
|
||||
</TableBody>
|
||||
</Table>
|
||||
</LstCard>
|
||||
</ScrollArea>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<LstCard className="m-2 p-2 min-h-2/5">
|
||||
<ScrollArea className="h-[400px]">
|
||||
<p className="text-center">Current Assigned lots</p>
|
||||
<Table>
|
||||
<TableHeader>
|
||||
@@ -188,21 +199,40 @@ export default function Lots() {
|
||||
<TableBody>
|
||||
{data?.map((lot: LotType) => (
|
||||
<TableRow key={lot.LabelOnlineID}>
|
||||
<TableCell className="font-medium">{lot.MachineLocation}</TableCell>
|
||||
<TableCell className="font-medium">{lot.AV}</TableCell>
|
||||
<TableCell className="font-medium">{lot.Alias}</TableCell>
|
||||
<TableCell className="font-medium">{lot.LOT}</TableCell>
|
||||
<TableCell className="font-medium">{lot.ProlinkLot}</TableCell>
|
||||
<TableCell className="font-medium">{lot.PlannedQTY}</TableCell>
|
||||
<TableCell className="font-medium">{lot.Produced}</TableCell>
|
||||
<TableCell className="font-medium">{lot.Remaining}</TableCell>
|
||||
<TableCell className="font-medium">{lot.overPrinting}</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{lot.MachineLocation}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{lot.AV}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{lot.Alias}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{lot.LOT}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{lot.ProlinkLot}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{lot.PlannedQTY}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{lot.Produced}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{lot.Remaining}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{lot.overPrinting}
|
||||
</TableCell>
|
||||
{user && roles.includes(user.role) && (
|
||||
<>
|
||||
{server === "usday1vms006" || server === "localhost" ? (
|
||||
{server === "usday1vms006" ||
|
||||
server === "localhost" ? (
|
||||
<>
|
||||
<TableCell className="flex justify-center">
|
||||
<ManualPrintForm lot={lot} />
|
||||
<ManualPrintForm />
|
||||
</TableCell>
|
||||
</>
|
||||
) : (
|
||||
@@ -217,6 +247,7 @@ export default function Lots() {
|
||||
</TableBody>
|
||||
)}
|
||||
</Table>
|
||||
</ScrollArea>
|
||||
</LstCard>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -20,31 +20,31 @@ import {
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
import { Textarea } from "@/components/ui/textarea";
|
||||
import {useSessionStore} from "@/lib/store/sessionStore";
|
||||
import { useSettingStore } from "@/lib/store/useSettings";
|
||||
import {LotType} from "@/types/lots";
|
||||
import axios from "axios";
|
||||
import { Tag } from "lucide-react";
|
||||
import { useState } from "react";
|
||||
import { Controller, useForm } from "react-hook-form";
|
||||
import { toast } from "sonner";
|
||||
import {manualPrintLabels} from "./ManualPrintLabel";
|
||||
|
||||
const printReason = [
|
||||
{ key: "printerIssue", label: "Printer Related" },
|
||||
{ key: "missingRfidTag", label: "Missing or incorrect tag" },
|
||||
{ key: "strapper", label: "Strapper Error" },
|
||||
{ key: "manualCheck", label: "20th pallet check" },
|
||||
{ key: "outOfSync", label: "Labeler Out of Sync" },
|
||||
];
|
||||
|
||||
export default function ManualPrintForm({lot}: {lot: LotType}) {
|
||||
const {user} = useSessionStore();
|
||||
export default function ManualPrintForm() {
|
||||
const token = localStorage.getItem("auth_token");
|
||||
const { settings } = useSettingStore();
|
||||
const [open, setOpen] = useState(false);
|
||||
const server = settings.filter((n) => n.name === "server")[0]?.value;
|
||||
// const serverPort = settings.filter((n) => n.name === "serverPort")[0]?.value;
|
||||
// const serverUrl = `http://${server}:${serverPort}`;
|
||||
|
||||
// what is the dyco set to? rfid or dyco
|
||||
const dyco = settings.filter((n) => n.name === "dycoPrint");
|
||||
const {
|
||||
register,
|
||||
handleSubmit,
|
||||
@@ -54,27 +54,22 @@ export default function ManualPrintForm({lot}: {lot: LotType}) {
|
||||
control,
|
||||
} = useForm();
|
||||
|
||||
const handlePrintLabel = async (lot: LotType) => {
|
||||
//console.log(lot);
|
||||
const labels: any = await manualPrintLabels(lot, user);
|
||||
|
||||
if (labels.success) {
|
||||
toast.success(labels.message);
|
||||
} else {
|
||||
toast.error(labels.message);
|
||||
}
|
||||
};
|
||||
|
||||
const handleManualPrintLog = async (logData: any, lot: LotType) => {
|
||||
const handleManualPrintLog = async (logData: any) => {
|
||||
// toast.success(`A new label was sent to printer: ${lot.PrinterName} for line ${lot.MachineDescription} `);
|
||||
const logdataUrl = `/api/ocp/manualLabelLog`;
|
||||
const logdataUrl = `/api/ocp/manuallabellog`;
|
||||
axios
|
||||
.post(logdataUrl, logData, {headers: {Authorization: `Bearer ${token}`}})
|
||||
.post(logdataUrl, logData, {
|
||||
headers: { Authorization: `Bearer ${token}` },
|
||||
})
|
||||
.then((d) => {
|
||||
//console.log(d);
|
||||
console.log(d);
|
||||
if (d.data.success) {
|
||||
toast.success(d.data.message);
|
||||
handlePrintLabel(lot);
|
||||
} else {
|
||||
toast.error(d.data.message);
|
||||
}
|
||||
reset();
|
||||
setOpen(false);
|
||||
})
|
||||
.catch((e) => {
|
||||
if (e.response.status === 500) {
|
||||
@@ -91,9 +86,14 @@ export default function ManualPrintForm({lot}: {lot: LotType}) {
|
||||
};
|
||||
|
||||
const onSubmit = (data: any) => {
|
||||
console.log(data);
|
||||
//console.log(data);
|
||||
|
||||
handleManualPrintLog(data, lot);
|
||||
handleManualPrintLog(data);
|
||||
};
|
||||
|
||||
const closeForm = () => {
|
||||
reset();
|
||||
setOpen(false);
|
||||
};
|
||||
return (
|
||||
<Dialog
|
||||
@@ -117,12 +117,14 @@ export default function ManualPrintForm({lot}: {lot: LotType}) {
|
||||
<DialogHeader>
|
||||
<DialogTitle>Edit profile</DialogTitle>
|
||||
<DialogDescription>
|
||||
Make changes to your profile here. Click save when you're done.
|
||||
Make changes to your profile here. Click save when
|
||||
you're done.
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
<form onSubmit={handleSubmit(onSubmit)}>
|
||||
<p>
|
||||
To manually print a label you must complete all the required fields below.
|
||||
To manually print a label you must complete all the
|
||||
required fields below.
|
||||
<br />
|
||||
If you clicked this in error just click close
|
||||
</p>
|
||||
@@ -143,35 +145,46 @@ export default function ManualPrintForm({lot}: {lot: LotType}) {
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectGroup>
|
||||
<SelectLabel>Print Reasons</SelectLabel>
|
||||
{printReason.map((printReason: any) => (
|
||||
<SelectItem value={printReason.key}>{printReason.label}</SelectItem>
|
||||
))}
|
||||
<SelectLabel>
|
||||
Print Reasons
|
||||
</SelectLabel>
|
||||
{printReason.map(
|
||||
(printReason: any) => (
|
||||
<SelectItem
|
||||
value={printReason.key}
|
||||
>
|
||||
{printReason.label}
|
||||
</SelectItem>
|
||||
)
|
||||
)}
|
||||
</SelectGroup>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
)}
|
||||
/>
|
||||
) : (
|
||||
<div>
|
||||
<div className="m-2">
|
||||
<Label htmlFor="printRason" className="m-1">
|
||||
Why are you manually printing?
|
||||
</Label>
|
||||
<Input
|
||||
type="text"
|
||||
className={errors.printReason ? "border-red-500" : ""}
|
||||
className={
|
||||
errors.printReason ? "border-red-500" : ""
|
||||
}
|
||||
aria-invalid={!!errors.printReason}
|
||||
{...register("printReason", {
|
||||
required: true,
|
||||
minLength: {
|
||||
value: 5,
|
||||
message: "To short of a reason please try again!",
|
||||
message:
|
||||
"To short of a reason please try again!",
|
||||
},
|
||||
})}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
<div>
|
||||
<div className="m-2">
|
||||
<Label htmlFor="line" className="m-1">
|
||||
"What is the line number you are printing?"
|
||||
</Label>
|
||||
@@ -184,7 +197,7 @@ export default function ManualPrintForm({lot}: {lot: LotType}) {
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<div className="m-2">
|
||||
<Label htmlFor="initials" className="m-1">
|
||||
Enter intials
|
||||
</Label>
|
||||
@@ -195,20 +208,52 @@ export default function ManualPrintForm({lot}: {lot: LotType}) {
|
||||
{...register("initials", { required: true })}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<hr />
|
||||
{dyco[0].value === "0" && (
|
||||
<div>
|
||||
<p>Enter the missing tag number.</p>
|
||||
<hr />
|
||||
<Label htmlFor="rfidTag" className="m-1">
|
||||
Enter the tag number only Example
|
||||
ALPLA000002541. only enter 2541
|
||||
</Label>
|
||||
<Input
|
||||
type="text"
|
||||
className={
|
||||
errors.printReason ? "border-red-500" : ""
|
||||
}
|
||||
aria-invalid={!!errors.printReason}
|
||||
{...register("rfidTag", {
|
||||
required: true,
|
||||
minLength: {
|
||||
value: 1,
|
||||
message: "Tag number is to short!",
|
||||
},
|
||||
})}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
<div className="m-2">
|
||||
<Textarea
|
||||
//label="Comments"
|
||||
placeholder="add more info as needed."
|
||||
{...register("additionalComments")}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<DialogFooter>
|
||||
<Button color="danger" variant="default" onClick={() => setOpen(!open)}>
|
||||
<div className="mt-3">
|
||||
<Button
|
||||
color="danger"
|
||||
variant="default"
|
||||
onClick={closeForm}
|
||||
>
|
||||
Close
|
||||
</Button>
|
||||
<Button color="primary" type="submit">
|
||||
Print
|
||||
</Button>
|
||||
</div>
|
||||
</DialogFooter>
|
||||
</form>
|
||||
</DialogContent>
|
||||
|
||||
@@ -3,7 +3,7 @@ import axios from "axios";
|
||||
|
||||
export const manualPrintLabels = async (lot: LotType, user: any) => {
|
||||
//console.log(lot);
|
||||
const labelUrl = `/ocp/manualPrintAndFollow`;
|
||||
const labelUrl = `/api/ocp/manualprintandfollow`;
|
||||
|
||||
try {
|
||||
const res = await axios.post(
|
||||
@@ -19,7 +19,7 @@ export const manualPrintLabels = async (lot: LotType, user: any) => {
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
success: true,
|
||||
success: false,
|
||||
message: `Line ${lot.MachineDescription} encountered an error printing labels: ${res.data.message}`,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,5 +1,154 @@
|
||||
import { LstCard } from "@/components/extendedUI/LstCard";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Skeleton } from "@/components/ui/skeleton";
|
||||
import {
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from "@/components/ui/table";
|
||||
import { getOcpLogs } from "@/utils/querys/production/ocpLogs";
|
||||
import { useQuery } from "@tanstack/react-query";
|
||||
import axios from "axios";
|
||||
import { format } from "date-fns";
|
||||
import { Trash } from "lucide-react";
|
||||
import { toast } from "sonner";
|
||||
|
||||
const labelLogs = [
|
||||
{ key: "message", label: "Error Message" },
|
||||
{ key: "created_at", label: "ErrorDat" },
|
||||
{ key: "clear", label: "Clear" },
|
||||
//{key: "reprint", label: "Reprint"}, // removing the reprint button for now until repritning is working as intended
|
||||
];
|
||||
|
||||
export default function OcpLogs() {
|
||||
return <LstCard className="m-2 p-2">Ocp Logs</LstCard>;
|
||||
const { data, isError, isLoading } = useQuery(getOcpLogs("4"));
|
||||
|
||||
const clearLog = async (log: any) => {
|
||||
try {
|
||||
const res = await axios.patch(`/api/logger/logs/${log.log_id}`);
|
||||
|
||||
if (res.data.success) {
|
||||
toast.success(`Log message: ${log.message}, was just cleared`);
|
||||
} else {
|
||||
console.log(res);
|
||||
toast.error(`There was an error clearing the message.`);
|
||||
}
|
||||
} catch (error) {
|
||||
toast.error(`There was an error trying to clearing the message.`);
|
||||
}
|
||||
};
|
||||
const logData = data ? data : [];
|
||||
if (isError) {
|
||||
return (
|
||||
<div className="m-2 p-2 min-h-2/5">
|
||||
<LstCard>
|
||||
<p className="text-center">Labels for the last 2 hours</p>
|
||||
<Table>
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
{labelLogs.map((l) => (
|
||||
<TableHead key={l.key}>{l.label}</TableHead>
|
||||
))}
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
|
||||
<TableBody>
|
||||
{Array(7)
|
||||
.fill(0)
|
||||
.map((_, i) => (
|
||||
<TableRow key={i}>
|
||||
<TableCell className="font-medium">
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</Table>
|
||||
</LstCard>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<LstCard className="m-2 p-2 min-h-2/5">
|
||||
<p className="text-center">Labels for the last 2 hours</p>
|
||||
<Table>
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
{labelLogs.map((l) => (
|
||||
<TableHead key={l.key}>{l.label}</TableHead>
|
||||
))}
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
{isLoading ? (
|
||||
<>
|
||||
<TableBody>
|
||||
{Array(7)
|
||||
.fill(0)
|
||||
.map((_, i) => (
|
||||
<TableRow key={i}>
|
||||
<TableCell className="font-medium">
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Skeleton className="h-4" />
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</>
|
||||
) : (
|
||||
<TableBody>
|
||||
{logData.map((label: any) => (
|
||||
<TableRow key={label.log_id}>
|
||||
<TableCell className="font-medium max-w-5/6">
|
||||
<p className="text-balance">
|
||||
{label.message}
|
||||
</p>
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
{format(
|
||||
label?.created_at.replace("Z", ""),
|
||||
"M/d/yyyy hh:mm"
|
||||
)}
|
||||
</TableCell>
|
||||
<TableCell className="font-medium">
|
||||
<Button
|
||||
size="icon"
|
||||
onClick={() => clearLog(label)}
|
||||
>
|
||||
<Trash />
|
||||
</Button>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
)}
|
||||
</Table>
|
||||
</LstCard>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,30 +1,55 @@
|
||||
import WrapperManualTrigger from "@/components/ocme/WrapperCard";
|
||||
import LabelLog from "./LabelLog";
|
||||
import Lots from "./Lots";
|
||||
import OcpLogs from "./OcpLogs";
|
||||
import PrinterStatus from "./PrinterStatus";
|
||||
import { useSettingStore } from "@/lib/store/useSettings";
|
||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
|
||||
|
||||
export default function OCPPage() {
|
||||
const { settings } = useSettingStore();
|
||||
|
||||
const server = settings.filter((n) => n.plantToken === "usday1");
|
||||
return (
|
||||
<div className="h-dvh w-full overflow-hidden">
|
||||
<div className="h-screen w-full ">
|
||||
<div className="flex flex-wrap gap-2">
|
||||
<div className="flex flex-col w-4/5 h-dvh">
|
||||
<div className="">
|
||||
<Lots />
|
||||
</div>
|
||||
|
||||
<div className="flex flex-row">
|
||||
<div className="w-1/2">
|
||||
<LabelLog />
|
||||
</div>
|
||||
<div className="w-1/2">
|
||||
<div className="w-5/6 h-1/2">
|
||||
<Tabs defaultValue="ocplogs" className="w-full">
|
||||
<TabsList className="grid w-full grid-cols-2">
|
||||
<TabsTrigger value="ocplogs">
|
||||
OcpLogs
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="labels">Labels</TabsTrigger>
|
||||
</TabsList>
|
||||
<TabsContent value="ocplogs">
|
||||
<div className="w-full">
|
||||
<OcpLogs />
|
||||
</div>
|
||||
</TabsContent>
|
||||
<TabsContent value="labels">
|
||||
<div className="w-full">
|
||||
<LabelLog />
|
||||
</div>
|
||||
</TabsContent>
|
||||
</Tabs>
|
||||
</div>
|
||||
</div>
|
||||
<div className="w-1/6">
|
||||
<div className="w-1/6 flex flex-col">
|
||||
{server && (
|
||||
<div>
|
||||
<WrapperManualTrigger />
|
||||
</div>
|
||||
)}
|
||||
<div>
|
||||
<PrinterStatus />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,14 @@
|
||||
import { LstCard } from "@/components/extendedUI/LstCard";
|
||||
import { ScrollArea } from "@/components/ui/scroll-area";
|
||||
import { Skeleton } from "@/components/ui/skeleton";
|
||||
import {Table, TableBody, TableCell, TableHead, TableHeader, TableRow} from "@/components/ui/table";
|
||||
import {
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from "@/components/ui/table";
|
||||
|
||||
let printerCols = [
|
||||
{
|
||||
@@ -19,7 +27,9 @@ let printerCols = [
|
||||
export default function PrinterStatus() {
|
||||
return (
|
||||
<LstCard className="m-2 p-2">
|
||||
<ScrollArea className="max-h-[300px]">
|
||||
<p className="text-center">Printer Status</p>
|
||||
|
||||
<Table>
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
@@ -30,7 +40,7 @@ export default function PrinterStatus() {
|
||||
</TableHeader>
|
||||
|
||||
<TableBody>
|
||||
{Array(10)
|
||||
{Array(5)
|
||||
.fill(0)
|
||||
.map((_, i) => (
|
||||
<TableRow key={i}>
|
||||
@@ -47,6 +57,7 @@ export default function PrinterStatus() {
|
||||
))}
|
||||
</TableBody>
|
||||
</Table>
|
||||
</ScrollArea>
|
||||
</LstCard>
|
||||
);
|
||||
}
|
||||
|
||||
64
frontend/src/components/ui/accordion.tsx
Normal file
64
frontend/src/components/ui/accordion.tsx
Normal file
@@ -0,0 +1,64 @@
|
||||
import * as React from "react"
|
||||
import * as AccordionPrimitive from "@radix-ui/react-accordion"
|
||||
import { ChevronDownIcon } from "lucide-react"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
function Accordion({
|
||||
...props
|
||||
}: React.ComponentProps<typeof AccordionPrimitive.Root>) {
|
||||
return <AccordionPrimitive.Root data-slot="accordion" {...props} />
|
||||
}
|
||||
|
||||
function AccordionItem({
|
||||
className,
|
||||
...props
|
||||
}: React.ComponentProps<typeof AccordionPrimitive.Item>) {
|
||||
return (
|
||||
<AccordionPrimitive.Item
|
||||
data-slot="accordion-item"
|
||||
className={cn("border-b last:border-b-0", className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function AccordionTrigger({
|
||||
className,
|
||||
children,
|
||||
...props
|
||||
}: React.ComponentProps<typeof AccordionPrimitive.Trigger>) {
|
||||
return (
|
||||
<AccordionPrimitive.Header className="flex">
|
||||
<AccordionPrimitive.Trigger
|
||||
data-slot="accordion-trigger"
|
||||
className={cn(
|
||||
"focus-visible:border-ring focus-visible:ring-ring/50 flex flex-1 items-start justify-between gap-4 rounded-md py-4 text-left text-sm font-medium transition-all outline-none hover:underline focus-visible:ring-[3px] disabled:pointer-events-none disabled:opacity-50 [&[data-state=open]>svg]:rotate-180",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
{children}
|
||||
<ChevronDownIcon className="text-muted-foreground pointer-events-none size-4 shrink-0 translate-y-0.5 transition-transform duration-200" />
|
||||
</AccordionPrimitive.Trigger>
|
||||
</AccordionPrimitive.Header>
|
||||
)
|
||||
}
|
||||
|
||||
function AccordionContent({
|
||||
className,
|
||||
children,
|
||||
...props
|
||||
}: React.ComponentProps<typeof AccordionPrimitive.Content>) {
|
||||
return (
|
||||
<AccordionPrimitive.Content
|
||||
data-slot="accordion-content"
|
||||
className="data-[state=closed]:animate-accordion-up data-[state=open]:animate-accordion-down overflow-hidden text-sm"
|
||||
{...props}
|
||||
>
|
||||
<div className={cn("pt-0 pb-4", className)}>{children}</div>
|
||||
</AccordionPrimitive.Content>
|
||||
)
|
||||
}
|
||||
|
||||
export { Accordion, AccordionItem, AccordionTrigger, AccordionContent }
|
||||
45
frontend/src/components/ui/resizable-panels.tsx
Normal file
45
frontend/src/components/ui/resizable-panels.tsx
Normal file
@@ -0,0 +1,45 @@
|
||||
"use client";
|
||||
|
||||
import { GripVertical } from "lucide-react";
|
||||
import * as ResizablePrimitive from "react-resizable-panels";
|
||||
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
const ResizablePanelGroup = ({
|
||||
className,
|
||||
...props
|
||||
}: React.ComponentProps<typeof ResizablePrimitive.PanelGroup>) => (
|
||||
<ResizablePrimitive.PanelGroup
|
||||
className={cn(
|
||||
"flex h-full w-full data-[panel-group-direction=vertical]:flex-col",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
|
||||
const ResizablePanel = ResizablePrimitive.Panel;
|
||||
|
||||
const ResizableHandle = ({
|
||||
withHandle,
|
||||
className,
|
||||
...props
|
||||
}: React.ComponentProps<typeof ResizablePrimitive.PanelResizeHandle> & {
|
||||
withHandle?: boolean;
|
||||
}) => (
|
||||
<ResizablePrimitive.PanelResizeHandle
|
||||
className={cn(
|
||||
"relative flex w-px items-center justify-center bg-border after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring focus-visible:ring-offset-1 data-[panel-group-direction=vertical]:h-px data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:-translate-y-1/2 data-[panel-group-direction=vertical]:after:translate-x-0 [&[data-panel-group-direction=vertical]>div]:rotate-90",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
{withHandle && (
|
||||
<div className="z-10 flex h-4 w-3 items-center justify-center rounded-sm border bg-border">
|
||||
<GripVertical className="h-2.5 w-2.5" />
|
||||
</div>
|
||||
)}
|
||||
</ResizablePrimitive.PanelResizeHandle>
|
||||
);
|
||||
|
||||
export { ResizablePanelGroup, ResizablePanel, ResizableHandle };
|
||||
56
frontend/src/components/ui/scroll-area.tsx
Normal file
56
frontend/src/components/ui/scroll-area.tsx
Normal file
@@ -0,0 +1,56 @@
|
||||
import * as React from "react"
|
||||
import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
function ScrollArea({
|
||||
className,
|
||||
children,
|
||||
...props
|
||||
}: React.ComponentProps<typeof ScrollAreaPrimitive.Root>) {
|
||||
return (
|
||||
<ScrollAreaPrimitive.Root
|
||||
data-slot="scroll-area"
|
||||
className={cn("relative", className)}
|
||||
{...props}
|
||||
>
|
||||
<ScrollAreaPrimitive.Viewport
|
||||
data-slot="scroll-area-viewport"
|
||||
className="ring-ring/10 dark:ring-ring/20 dark:outline-ring/40 outline-ring/50 size-full rounded-[inherit] transition-[color,box-shadow] focus-visible:ring-4 focus-visible:outline-1"
|
||||
>
|
||||
{children}
|
||||
</ScrollAreaPrimitive.Viewport>
|
||||
<ScrollBar />
|
||||
<ScrollAreaPrimitive.Corner />
|
||||
</ScrollAreaPrimitive.Root>
|
||||
)
|
||||
}
|
||||
|
||||
function ScrollBar({
|
||||
className,
|
||||
orientation = "vertical",
|
||||
...props
|
||||
}: React.ComponentProps<typeof ScrollAreaPrimitive.ScrollAreaScrollbar>) {
|
||||
return (
|
||||
<ScrollAreaPrimitive.ScrollAreaScrollbar
|
||||
data-slot="scroll-area-scrollbar"
|
||||
orientation={orientation}
|
||||
className={cn(
|
||||
"flex touch-none p-px transition-colors select-none",
|
||||
orientation === "vertical" &&
|
||||
"h-full w-2.5 border-l border-l-transparent",
|
||||
orientation === "horizontal" &&
|
||||
"h-2.5 flex-col border-t border-t-transparent",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
<ScrollAreaPrimitive.ScrollAreaThumb
|
||||
data-slot="scroll-area-thumb"
|
||||
className="bg-border relative flex-1 rounded-full"
|
||||
/>
|
||||
</ScrollAreaPrimitive.ScrollAreaScrollbar>
|
||||
)
|
||||
}
|
||||
|
||||
export { ScrollArea, ScrollBar }
|
||||
@@ -20,6 +20,7 @@ import { Route as IndexImport } from './routes/index'
|
||||
import { Route as OcpIndexImport } from './routes/ocp/index'
|
||||
import { Route as EomEomImport } from './routes/_eom/eom'
|
||||
import { Route as AuthProfileImport } from './routes/_auth/profile'
|
||||
import { Route as AdminUsersImport } from './routes/_admin/users'
|
||||
import { Route as AdminSettingsImport } from './routes/_admin/settings'
|
||||
import { Route as AdminServersImport } from './routes/_admin/servers'
|
||||
import { Route as AdminModulesImport } from './routes/_admin/modules'
|
||||
@@ -82,6 +83,12 @@ const AuthProfileRoute = AuthProfileImport.update({
|
||||
getParentRoute: () => AuthRoute,
|
||||
} as any)
|
||||
|
||||
const AdminUsersRoute = AdminUsersImport.update({
|
||||
id: '/users',
|
||||
path: '/users',
|
||||
getParentRoute: () => AdminRoute,
|
||||
} as any)
|
||||
|
||||
const AdminSettingsRoute = AdminSettingsImport.update({
|
||||
id: '/settings',
|
||||
path: '/settings',
|
||||
@@ -200,6 +207,13 @@ declare module '@tanstack/react-router' {
|
||||
preLoaderRoute: typeof AdminSettingsImport
|
||||
parentRoute: typeof AdminImport
|
||||
}
|
||||
'/_admin/users': {
|
||||
id: '/_admin/users'
|
||||
path: '/users'
|
||||
fullPath: '/users'
|
||||
preLoaderRoute: typeof AdminUsersImport
|
||||
parentRoute: typeof AdminImport
|
||||
}
|
||||
'/_auth/profile': {
|
||||
id: '/_auth/profile'
|
||||
path: '/profile'
|
||||
@@ -265,12 +279,14 @@ interface AdminRouteChildren {
|
||||
AdminModulesRoute: typeof AdminModulesRoute
|
||||
AdminServersRoute: typeof AdminServersRoute
|
||||
AdminSettingsRoute: typeof AdminSettingsRoute
|
||||
AdminUsersRoute: typeof AdminUsersRoute
|
||||
}
|
||||
|
||||
const AdminRouteChildren: AdminRouteChildren = {
|
||||
AdminModulesRoute: AdminModulesRoute,
|
||||
AdminServersRoute: AdminServersRoute,
|
||||
AdminSettingsRoute: AdminSettingsRoute,
|
||||
AdminUsersRoute: AdminUsersRoute,
|
||||
}
|
||||
|
||||
const AdminRouteWithChildren = AdminRoute._addFileChildren(AdminRouteChildren)
|
||||
@@ -305,6 +321,7 @@ export interface FileRoutesByFullPath {
|
||||
'/modules': typeof AdminModulesRoute
|
||||
'/servers': typeof AdminServersRoute
|
||||
'/settings': typeof AdminSettingsRoute
|
||||
'/users': typeof AdminUsersRoute
|
||||
'/profile': typeof AuthProfileRoute
|
||||
'/eom': typeof EomEomRoute
|
||||
'/ocp': typeof OcpIndexRoute
|
||||
@@ -323,6 +340,7 @@ export interface FileRoutesByTo {
|
||||
'/modules': typeof AdminModulesRoute
|
||||
'/servers': typeof AdminServersRoute
|
||||
'/settings': typeof AdminSettingsRoute
|
||||
'/users': typeof AdminUsersRoute
|
||||
'/profile': typeof AuthProfileRoute
|
||||
'/eom': typeof EomEomRoute
|
||||
'/ocp': typeof OcpIndexRoute
|
||||
@@ -344,6 +362,7 @@ export interface FileRoutesById {
|
||||
'/_admin/modules': typeof AdminModulesRoute
|
||||
'/_admin/servers': typeof AdminServersRoute
|
||||
'/_admin/settings': typeof AdminSettingsRoute
|
||||
'/_admin/users': typeof AdminUsersRoute
|
||||
'/_auth/profile': typeof AuthProfileRoute
|
||||
'/_eom/eom': typeof EomEomRoute
|
||||
'/ocp/': typeof OcpIndexRoute
|
||||
@@ -364,6 +383,7 @@ export interface FileRouteTypes {
|
||||
| '/modules'
|
||||
| '/servers'
|
||||
| '/settings'
|
||||
| '/users'
|
||||
| '/profile'
|
||||
| '/eom'
|
||||
| '/ocp'
|
||||
@@ -381,6 +401,7 @@ export interface FileRouteTypes {
|
||||
| '/modules'
|
||||
| '/servers'
|
||||
| '/settings'
|
||||
| '/users'
|
||||
| '/profile'
|
||||
| '/eom'
|
||||
| '/ocp'
|
||||
@@ -400,6 +421,7 @@ export interface FileRouteTypes {
|
||||
| '/_admin/modules'
|
||||
| '/_admin/servers'
|
||||
| '/_admin/settings'
|
||||
| '/_admin/users'
|
||||
| '/_auth/profile'
|
||||
| '/_eom/eom'
|
||||
| '/ocp/'
|
||||
@@ -472,7 +494,8 @@ export const routeTree = rootRoute
|
||||
"children": [
|
||||
"/_admin/modules",
|
||||
"/_admin/servers",
|
||||
"/_admin/settings"
|
||||
"/_admin/settings",
|
||||
"/_admin/users"
|
||||
]
|
||||
},
|
||||
"/_auth": {
|
||||
@@ -506,6 +529,10 @@ export const routeTree = rootRoute
|
||||
"filePath": "_admin/settings.tsx",
|
||||
"parent": "/_admin"
|
||||
},
|
||||
"/_admin/users": {
|
||||
"filePath": "_admin/users.tsx",
|
||||
"parent": "/_admin"
|
||||
},
|
||||
"/_auth/profile": {
|
||||
"filePath": "_auth/profile.tsx",
|
||||
"parent": "/_auth"
|
||||
|
||||
10
frontend/src/routes/_admin/users.tsx
Normal file
10
frontend/src/routes/_admin/users.tsx
Normal file
@@ -0,0 +1,10 @@
|
||||
import UserPage from "@/components/admin/user/UserPage";
|
||||
import { createFileRoute } from "@tanstack/react-router";
|
||||
|
||||
export const Route = createFileRoute("/_admin/users")({
|
||||
component: RouteComponent,
|
||||
});
|
||||
|
||||
function RouteComponent() {
|
||||
return <UserPage />;
|
||||
}
|
||||
5
frontend/src/utils/formStuff/debugButton.tsx
Normal file
5
frontend/src/utils/formStuff/debugButton.tsx
Normal file
@@ -0,0 +1,5 @@
|
||||
import { Button } from "@/components/ui/button";
|
||||
|
||||
export const DebugButton = (data: any) => {
|
||||
return <Button onClick={() => console.log(data.data)}>Debug</Button>;
|
||||
};
|
||||
14
frontend/src/utils/formStuff/options/userformOptions.tsx
Normal file
14
frontend/src/utils/formStuff/options/userformOptions.tsx
Normal file
@@ -0,0 +1,14 @@
|
||||
import { formOptions } from "@tanstack/react-form";
|
||||
|
||||
export const userFormOptions = (user: any) => {
|
||||
return formOptions({
|
||||
defaultValues: {
|
||||
username: user.username,
|
||||
password: "",
|
||||
email: user.email,
|
||||
role: user.role,
|
||||
//hobbies: [],
|
||||
},
|
||||
// } as Person,
|
||||
});
|
||||
};
|
||||
27
frontend/src/utils/passwordGen.ts
Normal file
27
frontend/src/utils/passwordGen.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
export const generatePassword = (length: number) => {
|
||||
const uppercase = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
|
||||
const lowercase = "abcdefghijklmnopqrstuvwxyz";
|
||||
const numbers = "0123456789";
|
||||
const symbols = "!@#$%&()_+-={}:,.<>?/"; // Safe symbol list
|
||||
|
||||
// Ensure the password contains at least one of each required type
|
||||
let password: any = [
|
||||
uppercase[Math.floor(Math.random() * uppercase.length)],
|
||||
lowercase[Math.floor(Math.random() * lowercase.length)],
|
||||
numbers[Math.floor(Math.random() * numbers.length)],
|
||||
symbols[Math.floor(Math.random() * symbols.length)],
|
||||
];
|
||||
|
||||
// Fill the rest of the password with random characters from all sets
|
||||
const allCharacters = uppercase + lowercase;
|
||||
for (let i = password.length; i < length; i++) {
|
||||
password.push(
|
||||
allCharacters[Math.floor(Math.random() * allCharacters.length)]
|
||||
);
|
||||
}
|
||||
|
||||
// Shuffle the password to avoid predictable patterns
|
||||
password = password.sort(() => Math.random() - 0.5).join("");
|
||||
|
||||
return password;
|
||||
};
|
||||
26
frontend/src/utils/querys/admin/users.tsx
Normal file
26
frontend/src/utils/querys/admin/users.tsx
Normal file
@@ -0,0 +1,26 @@
|
||||
import { queryOptions } from "@tanstack/react-query";
|
||||
import axios from "axios";
|
||||
|
||||
export function getUsers() {
|
||||
const token = localStorage.getItem("auth_token");
|
||||
return queryOptions({
|
||||
queryKey: ["getUsers"],
|
||||
queryFn: () => fetchUsers(token),
|
||||
enabled: !!token, // Prevents query if token is null
|
||||
staleTime: 1000,
|
||||
//refetchInterval: 2 * 2000,
|
||||
refetchOnWindowFocus: true,
|
||||
});
|
||||
}
|
||||
|
||||
const fetchUsers = async (token: string | null) => {
|
||||
const { data } = await axios.get(`/api/auth/allusers`, {
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
});
|
||||
// if we are not localhost ignore the devDir setting.
|
||||
//const url: string = window.location.host.split(":")[0];
|
||||
return data.data ?? [];
|
||||
};
|
||||
@@ -7,13 +7,13 @@ export function getlabels(hours: string) {
|
||||
queryFn: () => fetchSettings(hours),
|
||||
|
||||
staleTime: 1000,
|
||||
//refetchInterval: 2500,
|
||||
refetchInterval: 2 * 2000,
|
||||
refetchOnWindowFocus: true,
|
||||
});
|
||||
}
|
||||
|
||||
const fetchSettings = async (hours: string) => {
|
||||
const {data} = await axios.get(`/api/v1/ocp/labels?hours=${hours}`);
|
||||
const { data } = await axios.get(`/api/ocp/getlabels?hours=${hours}`);
|
||||
// if we are not localhost ignore the devDir setting.
|
||||
//const url: string = window.location.host.split(":")[0];
|
||||
return data.data ?? [];
|
||||
|
||||
@@ -7,13 +7,13 @@ export function getlots() {
|
||||
queryFn: () => fetchSettings(),
|
||||
|
||||
staleTime: 10 * 1000,
|
||||
//refetchInterval: 10 * 1000,
|
||||
refetchInterval: 10 * 1000,
|
||||
refetchOnWindowFocus: true,
|
||||
});
|
||||
}
|
||||
|
||||
const fetchSettings = async () => {
|
||||
const {data} = await axios.get("/api/v1/ocp/lots");
|
||||
const { data } = await axios.get("/api/ocp/getlots");
|
||||
// if we are not localhost ignore the devDir setting.
|
||||
//const url: string = window.location.host.split(":")[0];
|
||||
let lotData = data.data;
|
||||
|
||||
22
frontend/src/utils/querys/production/ocpLogs.tsx
Normal file
22
frontend/src/utils/querys/production/ocpLogs.tsx
Normal file
@@ -0,0 +1,22 @@
|
||||
import { queryOptions } from "@tanstack/react-query";
|
||||
import axios from "axios";
|
||||
|
||||
export function getOcpLogs(hours: string) {
|
||||
return queryOptions({
|
||||
queryKey: ["ocpLogs"],
|
||||
queryFn: () => fetchSettings(hours),
|
||||
|
||||
staleTime: 1000,
|
||||
refetchInterval: 2 * 1000,
|
||||
refetchOnWindowFocus: true,
|
||||
});
|
||||
}
|
||||
|
||||
const fetchSettings = async (hours: string) => {
|
||||
const { data } = await axios.get(
|
||||
`/api/logger/logs?service=ocp&service=rfid&level=error&level=warn&hours=${hours}`
|
||||
);
|
||||
// if we are not localhost ignore the devDir setting.
|
||||
//const url: string = window.location.host.split(":")[0];
|
||||
return data.data ?? [];
|
||||
};
|
||||
865
package-lock.json
generated
865
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
23
package.json
23
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "lstv2",
|
||||
"version": "2.9.0",
|
||||
"version": "2.11.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "concurrently -n \"server,frontend\" -c \"#007755,#2f6da3\" \"npm run dev:server\" \"cd frontend && npm run dev\"",
|
||||
@@ -9,7 +9,7 @@
|
||||
"dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts",
|
||||
"dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts",
|
||||
"build": "npm run build:server && npm run build:frontend",
|
||||
"build:server": "rimraf dist && tsc --build && npm run copy:scripts",
|
||||
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y",
|
||||
"build:frontend": "cd frontend && npm run build",
|
||||
"copy:scripts": "tsx server/scripts/copyScripts.ts",
|
||||
"copy:servers": "xcopy server\\services\\server\\utils\\serverData.json dist\\server\\services\\server\\utils /E /I /Y",
|
||||
@@ -21,10 +21,13 @@
|
||||
"deploy": "standard-version --conventional-commits && npm run prodBuild",
|
||||
"zipServer": "dotenvx run -f .env -- tsx server/scripts/zipUpBuild.ts \"C:\\Users\\matthes01\\Documents\\lstv2\"",
|
||||
"v1Build": "cd C:\\Users\\matthes01\\Documents\\logisticsSupportTool && npm run oldBuilder",
|
||||
"prodBuild": "npm run v1Build && powershell -ExecutionPolicy Bypass -File server/scripts/build.ps1 -dir \"C:\\Users\\matthes01\\Documents\\lstv2\" && npm run zipServer",
|
||||
"scriptBuild": "powershell -ExecutionPolicy Bypass -File server/scripts/build.ps1 -dir \"C:\\Users\\matthes01\\Documents\\lstv2\"",
|
||||
"removeOld": "rimraf dist && rimraf frontend/dist",
|
||||
"prodBuild": "npm run v1Build && npm run build && npm run zipServer && npm run dev",
|
||||
"commit": "cz",
|
||||
"prodinstall": "npm i --omit=dev && npm run db:migrate",
|
||||
"checkupdates": "npx npm-check-updates"
|
||||
"checkupdates": "npx npm-check-updates",
|
||||
"testingCode": "dotenvx run -f .env -- tsx watch database/testFiles/checkServerData.ts"
|
||||
},
|
||||
"config": {
|
||||
"commitizen": {
|
||||
@@ -32,7 +35,7 @@
|
||||
}
|
||||
},
|
||||
"admConfig": {
|
||||
"build": 50,
|
||||
"build": 147,
|
||||
"oldBuild": "backend-0.1.3.zip"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -42,12 +45,12 @@
|
||||
"@types/js-cookie": "^3.0.6",
|
||||
"@types/mssql": "^9.1.7",
|
||||
"@types/node": "^22.13.11",
|
||||
"@types/node-cron": "^3.0.11",
|
||||
"@types/nodemailer": "^6.4.17",
|
||||
"@types/pg": "^8.11.11",
|
||||
"@types/ws": "^8.18.0",
|
||||
"concurrently": "^9.1.2",
|
||||
"cz-conventional-changelog": "^3.3.0",
|
||||
"drizzle-kit": "^0.30.5",
|
||||
"fs-extra": "^11.3.0",
|
||||
"standard-version": "^9.5.0",
|
||||
"tsx": "^4.19.3",
|
||||
"typescript": "^5.8.2"
|
||||
@@ -57,14 +60,19 @@
|
||||
"@hono/node-server": "^1.14.0",
|
||||
"@hono/zod-openapi": "^0.19.2",
|
||||
"@scalar/hono-api-reference": "^0.7.2",
|
||||
"@tanstack/react-form": "^1.2.1",
|
||||
"@types/jsonwebtoken": "^9.0.9",
|
||||
"@types/nodemailer-express-handlebars": "^4.0.5",
|
||||
"adm-zip": "^0.5.16",
|
||||
"axios": "^1.8.4",
|
||||
"bcryptjs": "^3.0.2",
|
||||
"croner": "^9.0.0",
|
||||
"date-fns": "^4.1.0",
|
||||
"drizzle-kit": "^0.30.5",
|
||||
"drizzle-orm": "^0.41.0",
|
||||
"drizzle-zod": "^0.7.0",
|
||||
"fast-xml-parser": "^5.0.9",
|
||||
"fs-extra": "^11.3.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"mssql": "^11.0.1",
|
||||
"nodemailer": "^6.10.0",
|
||||
@@ -75,6 +83,7 @@
|
||||
"pino-pretty": "^13.0.0",
|
||||
"postgres": "^3.4.5",
|
||||
"rimraf": "^6.0.1",
|
||||
"st-ethernet-ip": "^2.7.3",
|
||||
"ws": "^8.18.1",
|
||||
"zod": "^3.24.2"
|
||||
}
|
||||
|
||||
69
server/globalUtils/freightClass.ts
Normal file
69
server/globalUtils/freightClass.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
export const freightClass = (
|
||||
weight: number,
|
||||
length: number,
|
||||
width: number,
|
||||
height: number
|
||||
) => {
|
||||
// mm to in conversion
|
||||
const convertMM = 25.4;
|
||||
|
||||
const convertKG = 2.20462;
|
||||
// Inputs
|
||||
const weightPounds = weight * convertKG;
|
||||
const lengthInches = length / convertMM;
|
||||
const widthInches = width / convertMM;
|
||||
const heightInches = height / convertMM;
|
||||
|
||||
// Calculate volume in cubic inches
|
||||
const volumeCubicInches = lengthInches * widthInches * heightInches;
|
||||
|
||||
// Convert cubic inches to cubic feet
|
||||
const volumeCubicFeet = volumeCubicInches / 1728;
|
||||
|
||||
// Calculate density
|
||||
const density = weightPounds / volumeCubicFeet;
|
||||
|
||||
// Determine freight class
|
||||
let freightClass;
|
||||
|
||||
if (density >= 50) {
|
||||
freightClass = 50;
|
||||
} else if (density >= 35) {
|
||||
freightClass = 55;
|
||||
} else if (density >= 30) {
|
||||
freightClass = 60;
|
||||
} else if (density >= 22.5) {
|
||||
freightClass = 65;
|
||||
} else if (density >= 15) {
|
||||
freightClass = 70;
|
||||
} else if (density >= 13.5) {
|
||||
freightClass = 77.5;
|
||||
} else if (density >= 12) {
|
||||
freightClass = 85;
|
||||
} else if (density >= 10.5) {
|
||||
freightClass = 92.5;
|
||||
} else if (density >= 9) {
|
||||
freightClass = 100;
|
||||
} else if (density >= 8) {
|
||||
freightClass = 110;
|
||||
} else if (density >= 7) {
|
||||
freightClass = 125;
|
||||
} else if (density >= 6) {
|
||||
freightClass = 150;
|
||||
} else if (density >= 5) {
|
||||
freightClass = 175;
|
||||
} else if (density >= 4) {
|
||||
freightClass = 200;
|
||||
} else if (density >= 3) {
|
||||
freightClass = 250;
|
||||
} else if (density >= 2) {
|
||||
freightClass = 300;
|
||||
} else if (density >= 1) {
|
||||
freightClass = 400;
|
||||
} else {
|
||||
freightClass = 500;
|
||||
}
|
||||
|
||||
// Output the freight class
|
||||
return freightClass;
|
||||
};
|
||||
@@ -1,12 +1,12 @@
|
||||
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
|
||||
import { serve } from "@hono/node-server";
|
||||
import { OpenAPIHono } from "@hono/zod-openapi";
|
||||
import { proxy } from "hono/proxy";
|
||||
|
||||
import { serveStatic } from "@hono/node-server/serve-static";
|
||||
import { logger } from "hono/logger";
|
||||
import { cors } from "hono/cors";
|
||||
import { createLog } from "./services/logger/logger.js";
|
||||
import { WebSocketServer } from "ws";
|
||||
|
||||
// custom routes
|
||||
import scalar from "./services/general/route/scalar.js";
|
||||
import system from "./services/server/systemServer.js";
|
||||
@@ -21,7 +21,12 @@ import loggerService from "./services/logger/loggerService.js";
|
||||
import ocpService from "./services/ocp/ocpService.js";
|
||||
import { db } from "../database/dbclient.js";
|
||||
import { settings } from "../database/schema/settings.js";
|
||||
import { count } from "drizzle-orm";
|
||||
import os from "os";
|
||||
import { tryCatch } from "./globalUtils/tryCatch.js";
|
||||
import { sendEmail } from "./services/notifications/controller/sendMail.js";
|
||||
import notify from "./services/notifications/notifyService.js";
|
||||
import eom from "./services/eom/eomService.js";
|
||||
import dataMart from "./services/dataMart/dataMartService.js";
|
||||
|
||||
// create the main prodlogin here
|
||||
const username = "lst_user";
|
||||
@@ -29,9 +34,17 @@ const password = "Alpla$$Prod";
|
||||
export const lstAuth = btoa(`${username}:${password}`);
|
||||
|
||||
// checking to make sure we have the settings intialized
|
||||
const serverIntialized = await db.select({ count: count() }).from(settings);
|
||||
const { data: settingsData, error: settingError } = await tryCatch(
|
||||
db.select().from(settings)
|
||||
);
|
||||
|
||||
if (settingError) {
|
||||
throw Error("Error getting settings from the db. critical error.");
|
||||
}
|
||||
|
||||
const serverIntialized: any = settingsData;
|
||||
export const installed =
|
||||
serverIntialized[0].count === 0 && process.env.NODE_ENV !== "development"
|
||||
serverIntialized.length === 0 && process.env.NODE_ENV !== "development"
|
||||
? false
|
||||
: true;
|
||||
createLog("info", "LST", "server", `Server is installed: ${installed}`);
|
||||
@@ -39,7 +52,10 @@ createLog("info", "LST", "server", `Server is installed: ${installed}`);
|
||||
const app = new OpenAPIHono({ strict: false });
|
||||
|
||||
// middle ware
|
||||
if (process.env.NODE_ENV === "development") {
|
||||
app.use("*", logger());
|
||||
}
|
||||
|
||||
app.use(
|
||||
"*",
|
||||
cors({
|
||||
@@ -53,17 +69,17 @@ app.use(
|
||||
);
|
||||
|
||||
// Middleware to normalize route case
|
||||
app.use("*", async (c, next) => {
|
||||
const lowercasedUrl = c.req.url.toLowerCase();
|
||||
// app.use("*", async (c, next) => {
|
||||
// // const lowercasedUrl = c.req.url.toLowerCase();
|
||||
// console.log("Incoming Request:", c.req.url, c.req.method);
|
||||
// If the URL is already lowercase, continue as usual
|
||||
if (c.req.url === lowercasedUrl) {
|
||||
return next();
|
||||
}
|
||||
// // // If the URL is already lowercase, continue as usual
|
||||
// // if (c.req.url === lowercasedUrl) {
|
||||
// await next();
|
||||
// // }
|
||||
|
||||
// Otherwise, re-route internally
|
||||
return c.redirect(lowercasedUrl, 308); // 308 preserves the HTTP method
|
||||
});
|
||||
// // // Otherwise, re-route internally
|
||||
// // return c.redirect(lowercasedUrl, 308); // 308 preserves the HTTP method
|
||||
// });
|
||||
|
||||
app.doc("/api/ref", {
|
||||
openapi: "3.0.0",
|
||||
@@ -85,6 +101,9 @@ const routes = [
|
||||
printers,
|
||||
loggerService,
|
||||
ocpService,
|
||||
notify,
|
||||
eom,
|
||||
dataMart,
|
||||
] as const;
|
||||
|
||||
const appRoutes = routes.forEach((route) => {
|
||||
@@ -143,7 +162,18 @@ process.on("SIGTERM", async () => {
|
||||
process.on("uncaughtException", async (err) => {
|
||||
console.log("Uncaught Exception:", err);
|
||||
//await closePool();
|
||||
process.exit(1);
|
||||
const emailData = {
|
||||
email: "blake.matthes@alpla.com", // should be moved to the db so it can be reused.
|
||||
subject: `${os.hostname()} has just encountered a crash.`,
|
||||
template: "serverCrash",
|
||||
context: {
|
||||
error: err,
|
||||
plant: `${os.hostname()}`,
|
||||
},
|
||||
};
|
||||
|
||||
await sendEmail(emailData);
|
||||
//process.exit(1);
|
||||
});
|
||||
|
||||
process.on("beforeExit", async () => {
|
||||
|
||||
@@ -53,14 +53,12 @@ add in the below and change each setting area that says change me to something t
|
||||
|
||||
```env
|
||||
# PORTS
|
||||
PROD_PORT=4000
|
||||
# To keep it all simple we will pass VITE to the ports that are used on both sides.
|
||||
VITE_SERVER_PORT=4400
|
||||
VITE_SERVER_PORT=4000
|
||||
|
||||
# logLevel
|
||||
LOG_LEVEL=debug
|
||||
PROD_PORT=4000
|
||||
# DUE to lstv1 we need 3000
|
||||
SEC_PORT=3000
|
||||
LOG_LEVEL=info
|
||||
# Auth stuff
|
||||
SALTING=12
|
||||
SECRET=CHANGEME
|
||||
@@ -138,6 +136,36 @@ Next use the example command below to get the service up and running.
|
||||
.\services.ps1 -serviceName "LSTV2" -option "install" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
|
||||
```
|
||||
|
||||
### Adding servers to the mix to update on from the front end
|
||||
|
||||
you will need to add your servers into the serverData.json.
|
||||
when the server starts up it will look at this file and make changes as needed.
|
||||
below is an example of the server
|
||||
|
||||
```JSON
|
||||
{
|
||||
"sName": "Kansas City",
|
||||
"serverDNS": "usksc1vms006",
|
||||
"plantToken": "usksc1",
|
||||
"idAddress": "10.42.9.26",
|
||||
"greatPlainsPlantCode": "85",
|
||||
"streetAddress": "1800 E 94th St Suite 300",
|
||||
"cityState": "Kansas City, MO",
|
||||
"zipcode": "64131",
|
||||
"contactEmail": "example@example.com",
|
||||
"contactPhone": "555-555-5555",
|
||||
"customerTiAcc": "ALPL01KCINT",
|
||||
"lstServerPort": "4000",
|
||||
"active": false,
|
||||
"serverLoc": "E:\\LST\\lstv2",
|
||||
"oldVersion": "E:\\LST\\lst_backend",
|
||||
"shippingHours": "[{\"early\": \"06:30\", \"late\": \"23:00\"}]",
|
||||
"tiPostTime": "[{\"from\": \"24\", \"to\": \"24\"}]",
|
||||
"otherSettings": [{ "specialInstructions": "" }]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
# Migrating From V1 to V2
|
||||
|
||||
## User migration
|
||||
|
||||
@@ -4,14 +4,37 @@ param (
|
||||
[string]$appPath,
|
||||
[string]$command, # just the command like run startadm or what ever you have in npm.
|
||||
[string]$description
|
||||
[string]$remote
|
||||
)
|
||||
|
||||
# Example string to run with the parameters in it.
|
||||
# .\services.ps1 -serviceName "LSTV2" -option "install" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
|
||||
|
||||
|
||||
### the fix
|
||||
# .\services.ps1 -serviceName "LST-App" -option "delete" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
|
||||
# .\services.ps1 -serviceName "LST-frontend" -option "delete" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
|
||||
# .\services.ps1 -serviceName "LST-System" -option "delete" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
|
||||
# .\services.ps1 -serviceName "LST-Gateway" -option "delete" -appPath "E:\LST\lstV2" -description "Logistics Support Tool V2" -command "run start"
|
||||
|
||||
# .\services.ps1 -serviceName "LST-App" -option "install" -appPath "E:\LST\lst_backend" -description "Logistics Support Tool V2" -command "run startapp"
|
||||
# .\services.ps1 -serviceName "LST-frontend" -option "install" -appPath "E:\LST\lst_backend" -description "Logistics Support Tool V2" -command "run startfront"
|
||||
|
||||
|
||||
|
||||
$nssmPath = $AppPath + "\nssm.exe"
|
||||
$npmPath = "C:\Program Files\nodejs\npm.cmd" # Path to npm.cmd
|
||||
|
||||
if($remote -eq "true"){
|
||||
$plantFunness = {
|
||||
param ($service, $processType, $location)
|
||||
# Call your PowerShell script inside plantFunness
|
||||
& "$($location)\dist\server\scripts\services.ps1" -serviceName $service -option $processType -appPath $location
|
||||
}
|
||||
|
||||
Invoke-Command -ComputerName $server -ScriptBlock $plantFunness -ArgumentList $service, $option, $appPath -Credential $credentials
|
||||
}
|
||||
|
||||
if (-not ([Security.Principal.WindowsPrincipal] [Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")) {
|
||||
Write-Host "Error: This script must be run as Administrator."
|
||||
exit 1
|
||||
|
||||
@@ -85,8 +85,23 @@ $plantFunness = {
|
||||
$localPath = $location -replace '\$', ':'
|
||||
$serverFile = "$($localPath)\$buildFile"
|
||||
$serverPath = "$($localPath)"
|
||||
$appPath = $extractedFolderPath
|
||||
$nssmPath = $serverPath + "\nssm.exe"
|
||||
$npmPath = "C:\Program Files\nodejs\npm.cmd" # Path to npm.cmd
|
||||
|
||||
|
||||
Write-Host "In the plant we go!!!!!"
|
||||
|
||||
######################################################################################
|
||||
# Removing the fist and frontend folder to make sure we keep them the same and clean.
|
||||
######################################################################################
|
||||
|
||||
# Delete the directories after extraction
|
||||
Write-Host "Deleting Dist and Frontend..."
|
||||
|
||||
Set-Location $serverPath
|
||||
npm run removeOld # --omit=dev
|
||||
|
||||
Write-Host "Unzipping the folder..."
|
||||
|
||||
$extractedFolderPath = $serverPath
|
||||
@@ -117,6 +132,17 @@ $plantFunness = {
|
||||
exit 1 # Exit with a non-zero code if there's an error
|
||||
}
|
||||
|
||||
# for iowa 2 need to change the port config on the start up of nextjs server
|
||||
if($token -eq "usiow2"){
|
||||
$jsonPkgloc = "$($obslst)\apps\frontend\package.json"
|
||||
#read the file
|
||||
$jsonContent = Get-Content -Path $jsonPkgloc | ConvertFrom-Json
|
||||
#change the second we want to update
|
||||
$jsonContent.scripts.start = "next start -p 3001"
|
||||
# convert back to json
|
||||
$jsonContent | ConvertTo-Json | Set-Content -Path $jsonPkgloc
|
||||
}
|
||||
|
||||
############################################################################
|
||||
Write-Host "Stopping the services to do the updates, pkgs and db changes."
|
||||
|
||||
@@ -162,10 +188,6 @@ $plantFunness = {
|
||||
# Service removoal and making sure we have the new version added
|
||||
#################################################################
|
||||
|
||||
$appPath = $extractedFolderPath
|
||||
$nssmPath = $serverPath + "\nssm.exe"
|
||||
$npmPath = "C:\Program Files\nodejs\npm.cmd" # Path to npm.cmd
|
||||
|
||||
#################################################################
|
||||
# Removing all the old services
|
||||
#################################################################
|
||||
@@ -199,7 +221,9 @@ $plantFunness = {
|
||||
Set-Location $serverPath
|
||||
npm run prodinstall # --omit=dev
|
||||
Write-Host "Finished doing updates"
|
||||
Start-Sleep -Seconds 1
|
||||
# Start-Sleep -Seconds 1
|
||||
# Write-HOst "Running db migrations"
|
||||
# npm run db:migrate
|
||||
|
||||
###########################################################
|
||||
# Old system still active until we have everything off it
|
||||
@@ -336,14 +360,14 @@ try {
|
||||
$gatewayport = "4400"
|
||||
$systemport = "4200"
|
||||
$ocmeport = "4300"
|
||||
$appport = "4900"
|
||||
$appport = "4400"
|
||||
|
||||
if ($token -eq "usiow2") {
|
||||
$dbLink = "lstBackendDB_2"
|
||||
$gatewayport = "4401"
|
||||
$systemport = "4201"
|
||||
$ocmeport = "4301"
|
||||
$appport = "4901"
|
||||
$appport = "4401"
|
||||
}
|
||||
|
||||
if ($token -in @("test1", "test2", "test3")) {
|
||||
@@ -395,13 +419,13 @@ try {
|
||||
###########################################################
|
||||
# Starting the services back up.
|
||||
###########################################################
|
||||
Write-Host "Starting the services"
|
||||
Write-Host "Starting $($serviceSystem)"
|
||||
Start-Service -DisplayName $serviceSystem
|
||||
Start-Sleep -Seconds 1
|
||||
Write-Host "Starting $($serviceGateway)"
|
||||
Start-Service -DisplayName $serviceGateway
|
||||
Start-Sleep -Seconds 1
|
||||
# Write-Host "Starting the services"
|
||||
# Write-Host "Starting $($serviceSystem)"
|
||||
# Start-Service -DisplayName $serviceSystem
|
||||
# Start-Sleep -Seconds 1
|
||||
# Write-Host "Starting $($serviceGateway)"
|
||||
# Start-Service -DisplayName $serviceGateway
|
||||
# Start-Sleep -Seconds 1
|
||||
#Write-Host "Starting $($serviceAuth)"
|
||||
#Start-Service -DisplayName $serviceAuth
|
||||
#Start-Sleep -Seconds 1
|
||||
@@ -415,10 +439,10 @@ try {
|
||||
Start-Service -DisplayName $serviceLstV2
|
||||
Start-Sleep -Seconds 1
|
||||
Write-Host "$($server) finished updating"
|
||||
if($token -eq "usday1"){
|
||||
Write-Host "Starting $($serviceOcme)"
|
||||
Start-Service -DisplayName $serviceOcme
|
||||
}
|
||||
# if($token -eq "usday1"){
|
||||
# Write-Host "Starting $($serviceOcme)"
|
||||
# Start-Service -DisplayName $serviceOcme
|
||||
# }
|
||||
|
||||
}
|
||||
Invoke-Command -ComputerName $server -ScriptBlock $plantFunness -ArgumentList $server, $token, $location, $buildZip, $buildLoc, $obslst, $obsBuild -Credential $credentials
|
||||
22
server/scripts/updatePermissions.ps1
Normal file
22
server/scripts/updatePermissions.ps1
Normal file
@@ -0,0 +1,22 @@
|
||||
# Define the array of folders
|
||||
$folders = @(
|
||||
"AlplaBasis",
|
||||
"AlplaBudget",
|
||||
"AlplaINVOICE",
|
||||
"AlplaLabel",
|
||||
"AlplaOrder",
|
||||
"AlplaPlanning",
|
||||
"AlplaPurchase",
|
||||
"AlplaStock",
|
||||
"PDF24",
|
||||
"Module shortcuts"
|
||||
)
|
||||
|
||||
# Set permissions using icacls
|
||||
$permissions = "Everyone:(OI)(CI)F"
|
||||
|
||||
# Loop through each folder and set permissions
|
||||
foreach ($folder in $folders) {
|
||||
$folderPath = "C:\Sources\AlplaPROD\$folder"
|
||||
icacls $folderPath /grant $permissions /t /c /q
|
||||
}
|
||||
@@ -10,7 +10,10 @@ type UpdateServerResponse = {
|
||||
message: string;
|
||||
};
|
||||
|
||||
export const updateServer = async (devApp: string, server: string | null): Promise<UpdateServerResponse> => {
|
||||
export const updateServer = async (
|
||||
devApp: string,
|
||||
server: string | null
|
||||
): Promise<UpdateServerResponse> => {
|
||||
const app = await getAppInfo(devApp);
|
||||
const serverInfo = await db
|
||||
.select()
|
||||
@@ -26,7 +29,8 @@ export const updateServer = async (devApp: string, server: string | null): Promi
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message: "Looks like you are missing the plant token or have entered an incorrect one please try again.",
|
||||
message:
|
||||
"Looks like you are missing the plant token or have entered an incorrect one please try again.",
|
||||
};
|
||||
}
|
||||
|
||||
@@ -110,7 +114,12 @@ export const updateServer = async (devApp: string, server: string | null): Promi
|
||||
await db
|
||||
.update(serverData)
|
||||
.set({ lastUpdated: sql`NOW()`, isUpgrading: false })
|
||||
.where(eq(serverData.plantToken, server?.toLowerCase() ?? ""));
|
||||
.where(
|
||||
eq(
|
||||
serverData.plantToken,
|
||||
server?.toLowerCase() ?? ""
|
||||
)
|
||||
);
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
@@ -156,17 +165,35 @@ export const updateServer = async (devApp: string, server: string | null): Promi
|
||||
export async function processAllServers(devApp: string) {
|
||||
const servers = await db.select().from(serverData);
|
||||
|
||||
createLog("info", "lst", "serverUpdater", `Running the update on all servers`);
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"serverUpdater",
|
||||
`Running the update on all servers`
|
||||
);
|
||||
let count = 1;
|
||||
for (const server of servers) {
|
||||
try {
|
||||
const updateToServer = await updateServer(devApp, server.plantToken);
|
||||
createLog("info", "lst", "serverUpdater", `${server.sName} was updated.`);
|
||||
const updateToServer = await updateServer(
|
||||
devApp,
|
||||
server.plantToken
|
||||
);
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"serverUpdater",
|
||||
`${server.sName} was updated.`
|
||||
);
|
||||
count = count + 1;
|
||||
|
||||
//return {success: true, message: `${server.sName} was updated.`, data: updateToServer};
|
||||
} catch (error: any) {
|
||||
createLog("info", "lst", "serverUpdater", `Error updating ${server.sName}: ${error.message}`);
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"serverUpdater",
|
||||
`Error updating ${server.sName}: ${error.message}`
|
||||
);
|
||||
//return {success: false, message: `Error updating ${server.sName}: ${error.message}`};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -88,7 +88,11 @@ const updateBuildNumber = (appLock: string) => {
|
||||
pkgJson.admConfig.build += 1;
|
||||
|
||||
// Write the updated data back
|
||||
fs.writeFileSync(packagePath, JSON.stringify(pkgJson, null, 2), "utf8");
|
||||
fs.writeFileSync(
|
||||
packagePath,
|
||||
JSON.stringify(pkgJson, null, 2),
|
||||
"utf8"
|
||||
);
|
||||
|
||||
createLog(
|
||||
"info",
|
||||
@@ -99,7 +103,7 @@ const updateBuildNumber = (appLock: string) => {
|
||||
// Auto-commit changes
|
||||
execSync("git add package.json");
|
||||
execSync(
|
||||
`git commit -m "build: bump build number to ${pkgJson.admConfig.build}"`
|
||||
`git commit -m "chore(release): bump build number to ${pkgJson.admConfig.build}"`
|
||||
);
|
||||
} else {
|
||||
createLog(
|
||||
@@ -162,12 +166,17 @@ export const createZip = async (appLock: string) => {
|
||||
`app Files (sorted by time):", ${JSON.stringify(appFiles)}`
|
||||
);
|
||||
|
||||
if (appFiles.length > 5) {
|
||||
appFiles.slice(0, -5).forEach((file) => {
|
||||
if (appFiles.length > 20) {
|
||||
appFiles.slice(0, -20).forEach((file) => {
|
||||
const filePath = path.join(destPath, file.name);
|
||||
try {
|
||||
fs.unlinkSync(filePath);
|
||||
createLog("info", "lst", "zipUpBuild", `Deleted: ${file.name}`);
|
||||
createLog(
|
||||
"info",
|
||||
"lst",
|
||||
"zipUpBuild",
|
||||
`Deleted: ${file.name}`
|
||||
);
|
||||
} catch (error: any) {
|
||||
createLog(
|
||||
"error",
|
||||
|
||||
@@ -3,12 +3,20 @@ import {db} from "../../../../database/dbclient.js";
|
||||
import { users } from "../../../../database/schema/users.js";
|
||||
import { createPassword } from "../utils/createPassword.js";
|
||||
import { setSysAdmin } from "./userRoles/setSysAdmin.js";
|
||||
import { createLog } from "../../logger/logger.js";
|
||||
|
||||
export const registerUser = async (username: string, password: string, email: string) => {
|
||||
export const registerUser = async (
|
||||
username: string,
|
||||
password: string,
|
||||
email: string
|
||||
) => {
|
||||
const usercount = await db.select().from(users);
|
||||
|
||||
// make sure the user dose not already exist in the system
|
||||
const userCheck = await db.select().from(users).where(eq(users.username, username));
|
||||
const userCheck = await db
|
||||
.select()
|
||||
.from(users)
|
||||
.where(eq(users.username, username));
|
||||
|
||||
if (userCheck.length === 1) {
|
||||
return {
|
||||
@@ -30,14 +38,22 @@ export const registerUser = async (username: string, password: string, email: st
|
||||
.returning({ user: users.username, email: users.email });
|
||||
|
||||
if (usercount.length <= 1) {
|
||||
console.log(`${username} is the first user and will be set to system admin.`);
|
||||
const updateUser = await db.select().from(users).where(eq(users.username, username));
|
||||
createLog(
|
||||
"info",
|
||||
"auth",
|
||||
"auth",
|
||||
`${username} is the first user and will be set to system admin.`
|
||||
);
|
||||
const updateUser = await db
|
||||
.select()
|
||||
.from(users)
|
||||
.where(eq(users.username, username));
|
||||
setSysAdmin(updateUser, "systemAdmin");
|
||||
}
|
||||
|
||||
return { sucess: true, message: "User Registered", user };
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
createLog("error", "auth", "auth", `${error}`);
|
||||
return {
|
||||
success: false,
|
||||
message: `${username} already exists please login or reset password, if you feel this is an error please contact your admin.`,
|
||||
|
||||
@@ -5,6 +5,7 @@ import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import type { User } from "../../../../types/users.js";
|
||||
import { createPassword } from "../../utils/createPassword.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { sendEmail } from "../../../notifications/controller/sendMail.js";
|
||||
|
||||
export const updateUserADM = async (userData: User) => {
|
||||
/**
|
||||
@@ -12,6 +13,7 @@ export const updateUserADM = async (userData: User) => {
|
||||
* password, username, email.
|
||||
*/
|
||||
|
||||
console.log(userData);
|
||||
createLog(
|
||||
"info",
|
||||
"apiAuthedRoute",
|
||||
@@ -45,6 +47,7 @@ export const updateUserADM = async (userData: User) => {
|
||||
username: userData.username ? userData.username : upd_user?.username,
|
||||
password: password,
|
||||
email: userData.email ? userData.email : upd_user.email,
|
||||
role: userData.role ? userData.role : upd_user.role,
|
||||
};
|
||||
|
||||
// term ? ilike(posts.title, term) : undefined
|
||||
@@ -60,6 +63,19 @@ export const updateUserADM = async (userData: User) => {
|
||||
};
|
||||
}
|
||||
|
||||
if (userData?.password!.length > 0) {
|
||||
// send this user an email so they have the randomized password.
|
||||
await sendEmail({
|
||||
email: user[0]?.email,
|
||||
subject: "LST - Password reset.",
|
||||
template: "passwordReset",
|
||||
context: {
|
||||
password: userData.password!,
|
||||
username: user[0].username!,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `${userData.username} has been updated.`,
|
||||
|
||||
@@ -48,10 +48,19 @@ app.openapi(
|
||||
//apiHit(c, { endpoint: "api/auth/setUserRoles" });
|
||||
const { username, module, role, override } = await c.req.json();
|
||||
try {
|
||||
const access = await setUserAccess(username, module, role, override);
|
||||
const access = await setUserAccess(
|
||||
username,
|
||||
module,
|
||||
role,
|
||||
override
|
||||
);
|
||||
//return apiReturn(c, true, access?.message, access?.data, 200);
|
||||
return c.json(
|
||||
{ success: access.success, message: access.message, data: access.data },
|
||||
{
|
||||
success: access.success,
|
||||
message: access.message,
|
||||
data: access.data,
|
||||
},
|
||||
200
|
||||
);
|
||||
} catch (error) {
|
||||
|
||||
@@ -29,13 +29,7 @@ const UserAccess = z.object({
|
||||
.openapi({ example: "smith@example.com" }),
|
||||
password: z
|
||||
.string()
|
||||
.min(6, { message: "Passwords must be longer than 3 characters" })
|
||||
.regex(/[A-Z]/, {
|
||||
message: "Password must contain at least one uppercase letter",
|
||||
})
|
||||
.regex(/[\W_]/, {
|
||||
message: "Password must contain at least one special character",
|
||||
})
|
||||
|
||||
.optional()
|
||||
.openapi({ example: "Password1!" }),
|
||||
});
|
||||
@@ -44,7 +38,7 @@ app.openapi(
|
||||
createRoute({
|
||||
tags: ["Auth:admin"],
|
||||
summary: "updates a specific user",
|
||||
method: "post",
|
||||
method: "patch",
|
||||
path: "/updateuser",
|
||||
middleware: [
|
||||
authMiddleware,
|
||||
|
||||
13
server/services/dataMart/controller/getActiveArticles.ts
Normal file
13
server/services/dataMart/controller/getActiveArticles.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { query } from "../../sqlServer/prodSqlServer.js";
|
||||
import { activeArticle } from "../../sqlServer/querys/dataMart/article.js";
|
||||
|
||||
export const getActiveAv = async () => {
|
||||
let articles: any = [];
|
||||
try {
|
||||
articles = await query(activeArticle, "Get active articles");
|
||||
} catch (error) {
|
||||
articles = error;
|
||||
}
|
||||
|
||||
return articles;
|
||||
};
|
||||
23
server/services/dataMart/controller/getinventory.ts
Normal file
23
server/services/dataMart/controller/getinventory.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { query } from "../../sqlServer/prodSqlServer.js";
|
||||
import {
|
||||
totalInvNoRn,
|
||||
totalInvRn,
|
||||
} from "../../sqlServer/querys/dataMart/totalINV.js";
|
||||
|
||||
export const getINV = async () => {
|
||||
let inventory: any = [];
|
||||
|
||||
let updatedQuery = totalInvNoRn;
|
||||
|
||||
try {
|
||||
inventory = await query(updatedQuery, "Gets Curruent inv");
|
||||
return { success: true, message: "Current inv", data: inventory };
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return {
|
||||
success: false,
|
||||
message: "There was an error getting the inventory",
|
||||
data: error,
|
||||
};
|
||||
}
|
||||
};
|
||||
14
server/services/dataMart/dataMartService.ts
Normal file
14
server/services/dataMart/dataMartService.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { OpenAPIHono } from "@hono/zod-openapi";
|
||||
import activequerys from "./route/getCurrentQuerys.js";
|
||||
import getArticles from "./route/getActiveArticles.js";
|
||||
import currentInv from "./route/getInventory.js";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const routes = [activequerys, getArticles, currentInv] as const;
|
||||
|
||||
const appRoutes = routes.forEach((route) => {
|
||||
app.route("/datamart", route);
|
||||
});
|
||||
|
||||
export default app;
|
||||
47
server/services/dataMart/route/getActiveArticles.ts
Normal file
47
server/services/dataMart/route/getActiveArticles.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
|
||||
import { apiHit } from "../../../globalUtils/apiHits.js";
|
||||
import { responses } from "../../../globalUtils/routeDefs/responses.js";
|
||||
import { getActiveAv } from "../controller/getActiveArticles.js";
|
||||
|
||||
const app = new OpenAPIHono({ strict: false });
|
||||
const EomStat = z.object({
|
||||
plant: z.string().openapi({ example: "Salt Lake City" }),
|
||||
userRan: z.string().openapi({ example: "smith034" }),
|
||||
eomSheetVersion: z.string().openapi({ example: "0.0.223" }),
|
||||
});
|
||||
|
||||
app.openapi(
|
||||
createRoute({
|
||||
tags: ["dataMart"],
|
||||
summary: "Returns all the Active articles.",
|
||||
method: "get",
|
||||
path: "/getarticles",
|
||||
|
||||
responses: responses(),
|
||||
}),
|
||||
async (c) => {
|
||||
//const body = await c.req.json();
|
||||
// make sure we have a vaid user being accessed thats really logged in
|
||||
//apiHit(c, { endpoint: `api/logger/logs/id` });
|
||||
try {
|
||||
return c.json(
|
||||
{
|
||||
success: true,
|
||||
message: "Current active Articles",
|
||||
data: await getActiveAv(),
|
||||
},
|
||||
200
|
||||
);
|
||||
} catch (error) {
|
||||
return c.json(
|
||||
{
|
||||
success: false,
|
||||
message: "There was an error posting the eom stat.",
|
||||
data: error,
|
||||
},
|
||||
400
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
export default app;
|
||||
102
server/services/dataMart/route/getCurrentQuerys.ts
Normal file
102
server/services/dataMart/route/getCurrentQuerys.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
|
||||
import { responses } from "../../../globalUtils/routeDefs/responses.js";
|
||||
|
||||
const app = new OpenAPIHono({ strict: false });
|
||||
const current: any = [
|
||||
{
|
||||
name: "getActiveAv",
|
||||
endpoint: "/api/datamart/getarticles",
|
||||
description: "Gets all current active AV, with specific critiera.",
|
||||
},
|
||||
// {
|
||||
// name: "getStockLaneDims",
|
||||
// endpoint: "/api/v1/masterData/getStockDims",
|
||||
// description: "Returns the lane dims along with a column to send actaul dims to be updated.",
|
||||
// },
|
||||
// {
|
||||
// name: "getAddressInfo",
|
||||
// endpoint: "/api/v1/masterData/getAddressInfo",
|
||||
// description: "Returns current active addresses with street and zip",
|
||||
// },
|
||||
// {
|
||||
// name: "getMissingPkgData",
|
||||
// endpoint: "/api/v1/masterData/getMissingPKGData",
|
||||
// description: "Returns all packaging data that is missing either printer, layout, or carton layout",
|
||||
// },
|
||||
// {
|
||||
// name: "getCustomerInventory",
|
||||
// endpoint: "/api/v1/masterData/getCustomerInventory",
|
||||
// description: "Returns specific customer inventory based on there address ID.",
|
||||
// criteria: "customer",
|
||||
// },
|
||||
// {
|
||||
// name: "getPalletLabels",
|
||||
// endpoint: "/api/v1/masterData/getPalletLabels",
|
||||
// description: "Returns specific amount of pallets RN, Needs label number and printer, Specfic to Dayton.",
|
||||
// criteria: "runningNumber,printerName,count",
|
||||
// },
|
||||
// {
|
||||
// name: "getOpenOrders",
|
||||
// endpoint: "/api/v1/masterData/getOpenOrders",
|
||||
// description:
|
||||
// "Returns open orders based on day count sent over, sDay 15 days in the past eDay 5 days in the future, can be left empty for this default days",
|
||||
// criteria: "sDay,eDay",
|
||||
// },
|
||||
// {
|
||||
// name: "getOpenIncoming",
|
||||
// endpoint: "/api/v1/masterData/getOpenIncoming",
|
||||
// description:
|
||||
// "Returns open orders based on day count sent over, sDay 15 days in the past eDay 5 days in the future, can be left empty for this default days",
|
||||
// criteria: "sDay,eDay",
|
||||
// },
|
||||
// {
|
||||
// name: "planningCheckPkg",
|
||||
// endpoint: "/api/v1/masterData/planningPkgCheck",
|
||||
// description: "Returns all lots starting later than today and has a pkg that is missing layouts.",
|
||||
// },
|
||||
{
|
||||
name: "getinventory",
|
||||
endpoint: "/api/datamart/getinventory",
|
||||
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
|
||||
description:
|
||||
"Returns all inventory, excludes inv locations. no running numbers",
|
||||
//criteria: "includeRunnningNumbers", // uncomment this out once the improt process can be faster
|
||||
},
|
||||
// {
|
||||
// name: "getOpenOrderUpdates",
|
||||
// endpoint: "/api/v1/masterData/getOpenOrderUpdates",
|
||||
// // description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
|
||||
// description: "Returns all orders based on customer id, leaving empty will pull everythinng in.",
|
||||
// criteria: "customer", // uncomment this out once the improt process can be faster
|
||||
// },
|
||||
// {
|
||||
// name: "getSiloAdjustment",
|
||||
// endpoint: "/api/v1/warehouse/getSiloAdjustment",
|
||||
// // description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
|
||||
// description: "Returns all siloadjustments in selected date range IE: 1/1/2025 to 1/31/2025",
|
||||
// criteria: "startDate,endDate", // uncomment this out once the improt process can be faster
|
||||
// },
|
||||
];
|
||||
|
||||
app.openapi(
|
||||
createRoute({
|
||||
tags: ["dataMart"],
|
||||
summary: "Returns all avalible querys.",
|
||||
method: "get",
|
||||
path: "/getavalibleaquerys",
|
||||
|
||||
responses: responses(),
|
||||
}),
|
||||
async (c) => {
|
||||
//const body = await c.req.json();
|
||||
// make sure we have a vaid user being accessed thats really logged in
|
||||
//apiHit(c, { endpoint: `api/logger/logs/id` });
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
message: "All Current Active Querys.",
|
||||
data: current,
|
||||
});
|
||||
}
|
||||
);
|
||||
export default app;
|
||||
54
server/services/dataMart/route/getInventory.ts
Normal file
54
server/services/dataMart/route/getInventory.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
|
||||
import { responses } from "../../../globalUtils/routeDefs/responses.js";
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
import { getINV } from "../controller/getinventory.js";
|
||||
|
||||
const app = new OpenAPIHono({ strict: false });
|
||||
|
||||
app.openapi(
|
||||
createRoute({
|
||||
tags: ["dataMart"],
|
||||
summary: "Returns All current inventory.",
|
||||
method: "get",
|
||||
path: "/getinventory",
|
||||
// request: {
|
||||
// body: {
|
||||
// content: {
|
||||
// "application/json": { schema: Body },
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
responses: responses(),
|
||||
}),
|
||||
async (c) => {
|
||||
// const { data: body, error } = await c.req.json();
|
||||
|
||||
// if (error) {
|
||||
// return c.json({
|
||||
// success: false,
|
||||
// message: "Missing data please try again.",
|
||||
// });
|
||||
// }
|
||||
// make sure we have a vaid user being accessed thats really logged in
|
||||
//apiHit(c, { endpoint: `api/logger/logs/id` });
|
||||
const { data, error } = await tryCatch(getINV());
|
||||
|
||||
if (error) {
|
||||
return c.json(
|
||||
{
|
||||
success: false,
|
||||
message: "There was an error getting the inv.",
|
||||
data: error,
|
||||
},
|
||||
400
|
||||
);
|
||||
}
|
||||
|
||||
return c.json({
|
||||
success: data.success,
|
||||
message: data.message,
|
||||
data: data.data,
|
||||
});
|
||||
}
|
||||
);
|
||||
export default app;
|
||||
139
server/services/eom/controller/addHistorical.ts
Normal file
139
server/services/eom/controller/addHistorical.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
// import cron from "node-cron";
|
||||
// import {runQuery, prisma, totalInvNoRn, activeArticle, getShiftTime, historicalInv} from "database";
|
||||
// import {createLog} from "logging";
|
||||
// import {deleteHistory} from "./deleteHistory.js";
|
||||
|
||||
// export const historyInv = async (date) => {
|
||||
// //console.log(date);
|
||||
// if (!date) {
|
||||
// return `Missing Data`;
|
||||
// }
|
||||
// // date should be sent over as a string IE: 2024-01-01
|
||||
// let inv = [];
|
||||
// try {
|
||||
// inv = await prisma.historyInventory.findMany({where: {histDate: date}});
|
||||
// console.log(inv.length);
|
||||
// // if the date returns nothing we need to pull the historical data
|
||||
// if (inv.length === 0) {
|
||||
// const result = await prisma.settings.findFirst({where: {name: "plantToken"}});
|
||||
// try {
|
||||
// const plantUpdate = historicalInv.replaceAll("test1", result.value);
|
||||
// const queryDate = plantUpdate.replaceAll("[date]", date);
|
||||
// inv = await runQuery(queryDate, "Get histical inv");
|
||||
|
||||
// return inv;
|
||||
// } catch (error) {
|
||||
// createLog("general/eom", "error", "There was an error getting the historical inv.");
|
||||
// return error;
|
||||
// }
|
||||
// } else {
|
||||
// return inv;
|
||||
// }
|
||||
// //return inv;
|
||||
// } catch (error) {
|
||||
// console.log(error);
|
||||
// return error;
|
||||
// }
|
||||
// };
|
||||
|
||||
// // start the cron job for getting the hostrical inv based on the plants shift time
|
||||
// export const startCronHist = () => {
|
||||
// let shiftTime = ["06", "00", "00"];
|
||||
// const startProcess = async () => {
|
||||
// let inv = [];
|
||||
// let articles = [];
|
||||
// let plantToken = "test1";
|
||||
// const date = new Date();
|
||||
// const dateString = date.toISOString().split("T")[0];
|
||||
// date.setDate(date.getDate() - 30);
|
||||
// const oldDate = date.toISOString().split("T")[0];
|
||||
|
||||
// // checking if even need to run this
|
||||
// // before adding more make sure we dont already have data
|
||||
// const checkInv = await prisma.historyInventory.findFirst({where: {histDate: dateString}});
|
||||
// if (checkInv) {
|
||||
// createLog(
|
||||
// "general/eom",
|
||||
// "warn",
|
||||
// `There seems to already be inventory added for ${dateString}, no new data will be added`
|
||||
// );
|
||||
// return;
|
||||
// }
|
||||
// // get plant token
|
||||
// try {
|
||||
// const result = await prisma.settings.findFirst({where: {name: "plantToken"}});
|
||||
// plantToken = result.value;
|
||||
// } catch (error) {
|
||||
// createLog("general/eom", "error", "failed to get planttoken");
|
||||
// }
|
||||
// //get shift time
|
||||
// try {
|
||||
// const result = await runQuery(getShiftTime.replaceAll("test1", plantToken), "GettingShift time");
|
||||
// shiftTime = result[0].shiftStartTime.split(":");
|
||||
// } catch (error) {
|
||||
// createLog("general/eom", "error", `Error running getShift Query: ${error}`);
|
||||
// }
|
||||
|
||||
// // get inventory
|
||||
// try {
|
||||
// const result = await runQuery(totalInvNoRn.replaceAll("test1", plantToken), "getting inventory");
|
||||
// inv = result;
|
||||
// } catch (error) {
|
||||
// createLog("general/eom", "error", `Error running get inventory Query: ${error}`);
|
||||
// }
|
||||
|
||||
// // get active articles
|
||||
// try {
|
||||
// const result = await runQuery(activeArticle.replaceAll("test1", plantToken), "Get active articles");
|
||||
// articles = result;
|
||||
// } catch (error) {
|
||||
// createLog("general/eom", "error", `Error running get article: ${error}`);
|
||||
// }
|
||||
|
||||
// //add the inventory to the historical table
|
||||
// try {
|
||||
// let hist = Object.entries(inv).map(([key, value]) => {
|
||||
// // remove the values we dont want in the historical view
|
||||
// const {total_Pallets, avalible_Pallets, coa_Pallets, held_Pallets, ...histData} = value;
|
||||
|
||||
// // get av tyep
|
||||
// const avType = articles.filter((a) => (a.IdArtikelvarianten = inv[key].av))[0].TypeOfMaterial;
|
||||
// // add in the new fields
|
||||
// const hist = {
|
||||
// ...histData,
|
||||
// histDate: dateString, //new Date(Date.now()).toISOString().split("T")[0],
|
||||
// avType,
|
||||
// };
|
||||
// return hist;
|
||||
// });
|
||||
|
||||
// try {
|
||||
// const addHistData = await prisma.historyInventory.createMany({data: hist});
|
||||
// createLog(
|
||||
// "general/eom",
|
||||
// "info",
|
||||
// `${addHistData.count} were just added to the historical inventory for date ${dateString}`
|
||||
// );
|
||||
// } catch (error) {
|
||||
// createLog("general/eom", "error", `Adding new historical inventory error: ${error}`);
|
||||
// }
|
||||
|
||||
// // delete the older inventory
|
||||
// deleteHistory(oldDate);
|
||||
// } catch (error) {
|
||||
// createLog("general/eom", "error", `Adding new historical inventory error: ${error}`);
|
||||
// }
|
||||
// };
|
||||
|
||||
// // actaully run the process once after restaart just to make sure we have inventory
|
||||
// startProcess();
|
||||
|
||||
// // setup the cron stuff
|
||||
// const startHour = shiftTime[0];
|
||||
// const startMin = shiftTime[1];
|
||||
// createLog("general/eom", "info", `Historical Data will run at ${shiftTime[0]}:${shiftTime[1]} daily`);
|
||||
// cron.schedule(`${startMin} ${startHour} * * *`, () => {
|
||||
// createLog("general/eom", "info", "Running historical invnetory.");
|
||||
// startProcess();
|
||||
// });
|
||||
// };
|
||||
15
server/services/eom/controller/removeHistorical.ts
Normal file
15
server/services/eom/controller/removeHistorical.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
// import {prisma} from "database";
|
||||
// import {createLog} from "logging";
|
||||
|
||||
// export const deleteHistory = async (date: string) => {
|
||||
// // delete the inventory if it equals this date
|
||||
// try {
|
||||
// const remove = await prisma.$executeRaw`
|
||||
// DELETE FROM historyInventory
|
||||
// WHERE histDate < ${date}
|
||||
// `;
|
||||
// createLog("general/eom", "info", `${remove} were just remove from the historical inventory for date: ${date}`);
|
||||
// } catch (error) {
|
||||
// createLog("general/eom", "error", `Removing historical inventory error: ${error}`);
|
||||
// }
|
||||
// };
|
||||
@@ -2,4 +2,12 @@ import {OpenAPIHono} from "@hono/zod-openapi";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
import stats from "./route/stats.js";
|
||||
import history from "./route/invHistory.js";
|
||||
const routes = [stats, history] as const;
|
||||
|
||||
const appRoutes = routes.forEach((route) => {
|
||||
app.route("/eom", route);
|
||||
});
|
||||
|
||||
export default app;
|
||||
|
||||
41
server/services/eom/route/invHistory.ts
Normal file
41
server/services/eom/route/invHistory.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
|
||||
import { apiHit } from "../../../globalUtils/apiHits.js";
|
||||
import { responses } from "../../../globalUtils/routeDefs/responses.js";
|
||||
|
||||
const app = new OpenAPIHono({ strict: false });
|
||||
const EomStat = z.object({
|
||||
plant: z.string().openapi({ example: "Salt Lake City" }),
|
||||
userRan: z.string().openapi({ example: "smith034" }),
|
||||
eomSheetVersion: z.string().openapi({ example: "0.0.223" }),
|
||||
});
|
||||
|
||||
app.openapi(
|
||||
createRoute({
|
||||
tags: ["eom"],
|
||||
summary: "Gets the correct eom history.",
|
||||
method: "post",
|
||||
path: "/histinv",
|
||||
request: {
|
||||
params: EomStat,
|
||||
},
|
||||
responses: responses(),
|
||||
}),
|
||||
async (c) => {
|
||||
//const body = await c.req.json();
|
||||
// make sure we have a vaid user being accessed thats really logged in
|
||||
//apiHit(c, { endpoint: `api/logger/logs/id` });
|
||||
try {
|
||||
return c.json({ success: true, message: "", data: [] }, 200);
|
||||
} catch (error) {
|
||||
return c.json(
|
||||
{
|
||||
success: false,
|
||||
message: "There was an error posting the eom stat.",
|
||||
data: error,
|
||||
},
|
||||
400
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
export default app;
|
||||
41
server/services/eom/route/stats.ts
Normal file
41
server/services/eom/route/stats.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
|
||||
import { apiHit } from "../../../globalUtils/apiHits.js";
|
||||
import { responses } from "../../../globalUtils/routeDefs/responses.js";
|
||||
|
||||
const app = new OpenAPIHono({ strict: false });
|
||||
const EomStat = z.object({
|
||||
plant: z.string().openapi({ example: "Salt Lake City" }),
|
||||
userRan: z.string().openapi({ example: "smith034" }),
|
||||
eomSheetVersion: z.string().openapi({ example: "0.0.223" }),
|
||||
});
|
||||
|
||||
app.openapi(
|
||||
createRoute({
|
||||
tags: ["eom"],
|
||||
summary: "Adds in the stats for the eom.",
|
||||
method: "post",
|
||||
path: "/stats",
|
||||
request: {
|
||||
params: EomStat,
|
||||
},
|
||||
responses: responses(),
|
||||
}),
|
||||
async (c) => {
|
||||
//const body = await c.req.json();
|
||||
// make sure we have a vaid user being accessed thats really logged in
|
||||
apiHit(c, { endpoint: `api/logger/logs/id` });
|
||||
try {
|
||||
return c.json({ success: true, message: "", data: [] }, 200);
|
||||
} catch (error) {
|
||||
return c.json(
|
||||
{
|
||||
success: false,
|
||||
message: "There was an error posting the eom stat.",
|
||||
data: error,
|
||||
},
|
||||
400
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
export default app;
|
||||
@@ -1,7 +0,0 @@
|
||||
import {createLog} from "../../logger/logger.js";
|
||||
|
||||
export const sendEmail = async () => {
|
||||
createLog("info", "lst", "general", "Preparing to send an email");
|
||||
|
||||
// settings
|
||||
};
|
||||
@@ -1,4 +1,4 @@
|
||||
import {and, eq, inArray, lte, sql} from "drizzle-orm";
|
||||
import { and, eq, gte, inArray, lte, sql } from "drizzle-orm";
|
||||
import { db } from "../../../../database/dbclient.js";
|
||||
import { logs } from "../../../../database/schema/logs.js";
|
||||
import { createLog } from "../logger.js";
|
||||
@@ -6,13 +6,17 @@ import {createLog} from "../logger.js";
|
||||
export const getLogs = async (data: any) => {
|
||||
try {
|
||||
// clear all remaining logs ne to info.
|
||||
const checked = data.checked && data.checked[0] === "true" ? true : false || false;
|
||||
const checked =
|
||||
data.checked && data.checked[0] === "true" ? true : false || false;
|
||||
const logData = await db
|
||||
.select()
|
||||
.from(logs)
|
||||
.where(
|
||||
and(
|
||||
lte(logs.created_at, sql.raw(`NOW() - INTERVAL '${data.hours} hours'`)),
|
||||
gte(
|
||||
logs.created_at,
|
||||
sql.raw(`NOW() - INTERVAL '${data.hours ?? "4"} hours'`)
|
||||
),
|
||||
inArray(logs.service, data.service),
|
||||
inArray(logs.level, data.level),
|
||||
eq(logs.checked, checked)
|
||||
@@ -22,7 +26,16 @@ export const getLogs = async (data: any) => {
|
||||
return { success: true, message: "logs returned", data: logData };
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
createLog("error", "lst", "logger", `There was an error deleteing server logs. ${error}`);
|
||||
return {success: false, message: "An error occured while trying to get the logs", error};
|
||||
createLog(
|
||||
"error",
|
||||
"lst",
|
||||
"logger",
|
||||
`There was an error deleteing server logs. ${error}`
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
message: "An error occured while trying to get the logs",
|
||||
error,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { pino, type LogFn, type Logger } from "pino";
|
||||
|
||||
export let logLevel = "info";
|
||||
export let logLevel = process.env.LOGLEVEL || "info";
|
||||
|
||||
const transport = pino.transport({
|
||||
targets: [
|
||||
@@ -45,6 +45,9 @@ export const createLog = (
|
||||
if (level in log) {
|
||||
log[level]({ username, service }, message);
|
||||
} else {
|
||||
log.warn({username, service}, `Invalid log level '${level}', falling back to warn: ${message}`);
|
||||
log.warn(
|
||||
{ username, service },
|
||||
`Invalid log level '${level}', falling back to warn: ${message}`
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
102
server/services/logistics/controller/returnMaterial.ts
Normal file
102
server/services/logistics/controller/returnMaterial.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
import { ConsoleLogWriter } from "drizzle-orm";
|
||||
import { prodEndpointCreation } from "../../../globalUtils/createUrl.js";
|
||||
import { createLog } from "../../logger/logger.js";
|
||||
import { query } from "../../sqlServer/prodSqlServer.js";
|
||||
import { labelData } from "../../sqlServer/querys/materialHelpers/labelInfo.js";
|
||||
import axios from "axios";
|
||||
import { laneInfo } from "../../sqlServer/querys/materialHelpers/laneInfo.js";
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
|
||||
type Data = {
|
||||
runningNr: string;
|
||||
laneName: string;
|
||||
};
|
||||
export const returnMaterial = async (data: Data, prod: any) => {
|
||||
const { runningNr, laneName } = data;
|
||||
// replace the rn
|
||||
const rnReplace = labelData.replaceAll("[rn]", runningNr);
|
||||
|
||||
// get the lane id by name
|
||||
const laneQuery = laneInfo.replaceAll("[laneName]", laneName);
|
||||
|
||||
let barcode;
|
||||
// get the barcode from the running number
|
||||
try {
|
||||
barcode = await query(rnReplace, "labelData");
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
createLog(
|
||||
"error",
|
||||
prod.user.username,
|
||||
"logistics",
|
||||
`Error getting barcode: ${error}`
|
||||
);
|
||||
}
|
||||
|
||||
const { data: laneData, error: laneError } = await tryCatch(
|
||||
query(laneQuery, "laneInfo")
|
||||
);
|
||||
|
||||
if (laneError) {
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
"The lane you entered is either deactivated or dose not exist.",
|
||||
laneError,
|
||||
};
|
||||
}
|
||||
|
||||
if (!laneData) {
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
"The lane you entered is either deactivated or dose not exist.",
|
||||
};
|
||||
}
|
||||
|
||||
if (laneData.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
"The lane you entered is either deactivated or dose not exist.",
|
||||
};
|
||||
}
|
||||
|
||||
if (barcode.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
message: "The running number you've is not in stock.",
|
||||
};
|
||||
//throw Error("The provided runningNr is not in stock");
|
||||
}
|
||||
// create the url to post
|
||||
const url = await prodEndpointCreation(
|
||||
"/public/v1.0/IssueMaterial/ReturnPartiallyConsumedManualMaterial"
|
||||
);
|
||||
|
||||
const returnSomething = {
|
||||
laneId: laneData[0]?.laneID,
|
||||
barcode: barcode[0]?.barcode,
|
||||
};
|
||||
|
||||
try {
|
||||
const results = await axios.post(url, returnSomething, {
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Basic ${prod.user.prod}`,
|
||||
},
|
||||
});
|
||||
//console.log(results);
|
||||
return {
|
||||
success: true,
|
||||
message: "Material was returned",
|
||||
status: results.status,
|
||||
};
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
status: 200,
|
||||
message: error.response?.data.errors[0].message,
|
||||
};
|
||||
}
|
||||
};
|
||||
@@ -1,9 +1,10 @@
|
||||
import { OpenAPIHono } from "@hono/zod-openapi";
|
||||
|
||||
import comsumeMaterial from "./route/consumeMaterial.js";
|
||||
import returnMat from "./route/returnMaterial.js";
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const routes = [comsumeMaterial] as const;
|
||||
const routes = [comsumeMaterial, returnMat] as const;
|
||||
|
||||
// app.route("/server", modules);
|
||||
const appRoutes = routes.forEach((route) => {
|
||||
|
||||
70
server/services/logistics/route/returnMaterial.ts
Normal file
70
server/services/logistics/route/returnMaterial.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
|
||||
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
|
||||
import { apiHit } from "../../../globalUtils/apiHits.js";
|
||||
import { verify } from "hono/jwt";
|
||||
import { returnMaterial } from "../controller/returnMaterial.js";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const responseSchema = z.object({
|
||||
success: z.boolean().optional().openapi({ example: true }),
|
||||
message: z.string().optional().openapi({ example: "user access" }),
|
||||
});
|
||||
|
||||
app.openapi(
|
||||
createRoute({
|
||||
tags: ["logistics"],
|
||||
summary: "Retrns material based on its running number and laneName",
|
||||
method: "post",
|
||||
path: "/return",
|
||||
middleware: authMiddleware,
|
||||
description:
|
||||
"Provided a running number and Lane to return the material.",
|
||||
responses: {
|
||||
200: {
|
||||
content: { "application/json": { schema: responseSchema } },
|
||||
description: "stopped",
|
||||
},
|
||||
400: {
|
||||
content: { "application/json": { schema: responseSchema } },
|
||||
description: "Failed to stop",
|
||||
},
|
||||
401: {
|
||||
content: { "application/json": { schema: responseSchema } },
|
||||
description: "Failed to stop",
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
apiHit(c, { endpoint: "api/sqlProd/close" });
|
||||
const authHeader = c.req.header("Authorization");
|
||||
const token = authHeader?.split("Bearer ")[1] || "";
|
||||
|
||||
try {
|
||||
const payload = await verify(token, process.env.JWT_SECRET!);
|
||||
try {
|
||||
//return apiReturn(c, true, access?.message, access?.data, 200);
|
||||
const data = await c.req.json();
|
||||
const consume = await returnMaterial(data, payload);
|
||||
return c.json(
|
||||
{ success: consume?.success, message: consume?.message },
|
||||
200
|
||||
);
|
||||
} catch (error) {
|
||||
//console.log(error);
|
||||
//return apiReturn(c, false, "Error in setting the user access", error, 400);
|
||||
return c.json(
|
||||
{
|
||||
success: false,
|
||||
message: "Missing data please try again",
|
||||
error,
|
||||
},
|
||||
400
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
return c.json({ success: false, message: "Unauthorized" }, 401);
|
||||
}
|
||||
}
|
||||
);
|
||||
export default app;
|
||||
@@ -0,0 +1,143 @@
|
||||
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
|
||||
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { notifications } from "../../../../../database/schema/notifications.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { sendEmail } from "../sendMail.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
|
||||
export interface DownTime {
|
||||
downTimeId?: number;
|
||||
machineAlias?: string;
|
||||
}
|
||||
export default async function reprintLabelMonitor(notifyData: any) {
|
||||
// we will over ride this with users that want to sub to this
|
||||
// a new table will be called subalerts and link to the do a kinda linkn where the user wants it then it dose subId: 1, userID: x, notificationId: y. then in here we look up the userid to get the email :D
|
||||
// this could then leave the emails in the notificaion blank and let users sub to it.
|
||||
if (notifyData.emails === "") {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`There are no emails set for ${notifyData.name}`
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// console.log(data.secondarySetting[0].duration);
|
||||
let dQuery = `
|
||||
SELECT
|
||||
[IdHistoryStillstandsereignis] as downTimeId
|
||||
,DATEDIFF(MINUTE,b.[Startzeit], b.[Endzeit]) as totalDuration
|
||||
--, b.[IdMaschine]
|
||||
,x.[Bezeichnung] as machineAlias
|
||||
--,b.[IdStillstandsGrund],
|
||||
, c.CTO_Code
|
||||
,c.Downtime_Description
|
||||
--,b.[IdFehlermerkmal],
|
||||
,case when g.DT_Group_Desc is null then 'Not assigned yet' else g.DT_Group_Desc end as groupDesc
|
||||
,b.[Bemerkung] as remark
|
||||
,CONVERT(VARCHAR, CAST(b.[Startzeit] AS DATETIME), 100) dtStart
|
||||
,CONVERT(VARCHAR, CAST(b.[Endzeit] AS DATETIME), 100) dtEnd
|
||||
|
||||
FROM Alplaprod_test1.[dbo].[T_HistoryStillstandsereignis] (nolock)b
|
||||
|
||||
--get the machine info
|
||||
left join
|
||||
Alplaprod_test1.[dbo].[T_Maschine] (nolock)x
|
||||
on b.IdMaschine = x.IdMaschine
|
||||
|
||||
-- add in the cto codes
|
||||
left join
|
||||
Alplaprod_test1.[dbo].[V_MES_Downtime_Reasons] (nolock)c
|
||||
on b.IdStillstandsGrund = c.Local_Downtime_ID
|
||||
|
||||
left join
|
||||
Alplaprod_test1.[dbo].[V_MES_Downtime_Characteristics] (nolock)g
|
||||
on b.IdFehlermerkmal = g.Local_DT_Characteristic_Id
|
||||
|
||||
|
||||
where DATEDIFF(MINUTE,b.[Startzeit],b.[Endzeit]) > ${
|
||||
notifyData.notifiySettings
|
||||
? notifyData.notifiySettings?.duration
|
||||
: 10
|
||||
}
|
||||
and b.[Startzeit] > getDate() - ${
|
||||
notifyData.notifiySettings
|
||||
? notifyData.notifiySettings?.daysInPast
|
||||
: 10
|
||||
} --adding this date check in so we dont get everything possible
|
||||
and c.CTO_Code not like 'a%'
|
||||
and c.CTO_Code not like 'b%'
|
||||
and c.CTO_Code not like 'c%'
|
||||
and c.CTO_Code not like 'd%'
|
||||
and c.CTO_Code not like 'e%'
|
||||
and c.CTO_Code not like 'f%'
|
||||
and c.CTO_Code not like 'y%'
|
||||
order by IdHistoryStillstandsereignis desc
|
||||
`;
|
||||
|
||||
//console.log(query);
|
||||
let downTime: any; //DownTime[];
|
||||
try {
|
||||
downTime = await query(dQuery, "downTimeCheck");
|
||||
//console.log(labels.length);
|
||||
|
||||
if (
|
||||
downTime.length > 0 &&
|
||||
downTime[0]?.downTimeId > notifyData.notifiySettings.prodID
|
||||
) {
|
||||
//send the email :D
|
||||
const emailSetup = {
|
||||
emailTo: notifyData.emails,
|
||||
subject: `Alert! Downtime recorded greater than ${
|
||||
notifyData.notifiySettings?.duration
|
||||
}min ${
|
||||
downTime.length === 1
|
||||
? `on ${downTime[0].machineAlias}`
|
||||
: ""
|
||||
}`,
|
||||
template: "downTimeCheck",
|
||||
context: {
|
||||
items: downTime,
|
||||
secondarySetting: notifyData.notifiySettings,
|
||||
},
|
||||
};
|
||||
|
||||
const sentEmail = await sendEmail(emailSetup);
|
||||
|
||||
if (!sentEmail.success) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
"Failed to send email, will try again on next interval"
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const { data, error } = await tryCatch(
|
||||
db
|
||||
.update(notifications)
|
||||
.set({
|
||||
lastRan: sql`NOW()`,
|
||||
notifiySettings: {
|
||||
...notifyData.notifiySettings,
|
||||
prodID: downTime[0].downTimeId,
|
||||
},
|
||||
})
|
||||
.where(eq(notifications.name, notifyData.name))
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`Error from running the downtimeCheck query: ${err}`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { notifications } from "../../../../../database/schema/notifications.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
|
||||
const notification = async (notifyData: any) => {
|
||||
/**
|
||||
* Pass the entire notification over
|
||||
*/
|
||||
createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
|
||||
|
||||
const { data, error } = await tryCatch(
|
||||
db
|
||||
.update(notifications)
|
||||
.set({
|
||||
lastRan: sql`NOW()`,
|
||||
// notifiySettings: {
|
||||
// ...updateSettings,
|
||||
// prodID: labels[0].IdEtikettenHistorie,
|
||||
// },
|
||||
})
|
||||
.where(eq(notifications.name, notifyData.name))
|
||||
);
|
||||
};
|
||||
|
||||
export default notification;
|
||||
@@ -0,0 +1,133 @@
|
||||
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
|
||||
import { isWeekend } from "date-fns";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { sendEmail } from "../sendMail.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { notifications } from "../../../../../database/schema/notifications.js";
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
|
||||
export interface PPOO {
|
||||
IdPosition?: number;
|
||||
}
|
||||
export default async function reprintLabelMonitor(notifyData: any) {
|
||||
createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
|
||||
if (notifyData.emails === "") {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`There are no emails set for ${notifyData.name}`
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// parse the secondarySetting back to json to use it.
|
||||
// notifyData = { ...notifyData, secondarySetting: JSON.parse(notifyData.secondarySetting) };
|
||||
|
||||
// as this one goes to managers we want to not send on the weekends
|
||||
|
||||
const weekend = isWeekend(new Date(Date.now()));
|
||||
|
||||
if (weekend && notifyData.notifiySettings.weekend) {
|
||||
createLog(
|
||||
"info",
|
||||
"notify",
|
||||
"notify",
|
||||
`${notifyData.name} will not run on the weekends`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let notifyQuery = `
|
||||
SELECT
|
||||
--[EinlagerungsDatummin] as lastMovingDate,
|
||||
round(VerfuegbareMengeVPKSum,2) as pallets
|
||||
,VerfuegbareMengeSum as total
|
||||
,round([GesperrteMengeVpkSum],2) as held
|
||||
,round([GesperrteMengeSum],2) as heldQty
|
||||
,[IdArtikelVarianten] as av
|
||||
,[IdProdBereich] as pfcID
|
||||
,[ArtikelVariantenBez] as articleDescription
|
||||
,[ArtikelVariantenAlias] as articleDescriptionAlias
|
||||
,[LagerAbteilungKurzBez] as location
|
||||
,[Lfdnr] as runningNumber
|
||||
,[Produktionslos] as lot
|
||||
,[ProduktionsDatumMin] as productionDate
|
||||
,IdPosition
|
||||
FROM [AlplaPROD_test1].[dbo].[V_LagerPositionenBarcodes] (nolock)
|
||||
|
||||
where idlagerabteilung in ([locations]) and [ProduktionsDatumMin] < DATEadd( Hour, -[timeCheck], getdate())
|
||||
|
||||
order by [ProduktionsDatumMin] asc
|
||||
`;
|
||||
|
||||
//update the time check
|
||||
notifyQuery = notifyQuery.replaceAll("[timeCheck]", notifyData.checkTime);
|
||||
notifyQuery = notifyQuery.replaceAll(
|
||||
"[locations]",
|
||||
notifyData.notifiySettings.locations
|
||||
);
|
||||
|
||||
let prod: PPOO[];
|
||||
try {
|
||||
prod = await query(notifyQuery, "Label Reprints");
|
||||
//console.log(labels.length);
|
||||
// const now = Date.now()
|
||||
if (prod.length > 0) {
|
||||
//send the email :D
|
||||
|
||||
// update the count with the result
|
||||
|
||||
const emailSetup = {
|
||||
emailTo: notifyData.emails,
|
||||
subject: `Alert! Pallets in production greater than ${notifyData.checkTime} ${notifyData.timeType}`,
|
||||
template: "productionCheck",
|
||||
context: {
|
||||
items: prod,
|
||||
count: prod.length,
|
||||
checkTime: notifyData.checkTime,
|
||||
timeCheck: notifyData.timeType,
|
||||
},
|
||||
};
|
||||
|
||||
const sentEmail = await sendEmail(emailSetup);
|
||||
|
||||
if (!sentEmail.success) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
"Failed to send email, will try again on next interval"
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let updateSettings = notifyData.notifiySettings;
|
||||
const { data, error } = await tryCatch(
|
||||
db
|
||||
.update(notifications)
|
||||
.set({
|
||||
lastRan: sql`NOW()`,
|
||||
notifiySettings: {
|
||||
...updateSettings,
|
||||
count: prod.length,
|
||||
prodID: prod[0].IdPosition,
|
||||
},
|
||||
})
|
||||
.where(eq(notifications.name, notifyData.name))
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
} catch (err) {
|
||||
createLog(
|
||||
"error",
|
||||
"sql",
|
||||
"error",
|
||||
`Error from running the Label Reprints query: ${err}`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,158 @@
|
||||
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
|
||||
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { notifications } from "../../../../../database/schema/notifications.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { sendEmail } from "../sendMail.js";
|
||||
|
||||
export interface Blocking {
|
||||
HumanReadableId?: number;
|
||||
subject?: string;
|
||||
}
|
||||
export default async function qualityBlockingMonitor(notifyData: any) {
|
||||
createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
|
||||
if (notifyData.emails === "") {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`There are no emails set for ${notifyData.name}`
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
let blockQuery = `
|
||||
SELECT
|
||||
'Alert! new blocking order: #' + cast(HumanReadableId as varchar) + ' - ' + ArticleVariantDescription as subject,
|
||||
cast([HumanReadableId] as varchar) as blockingNumber,
|
||||
[ArticleVariantDescription] as article,
|
||||
cast([CustomerHumanReadableId] as varchar) + ' - ' + [CustomerDescription] as customer,
|
||||
convert(varchar(10), [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate], 101) + ' - ' + convert(varchar(5), [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate], 108) as blockingDate,
|
||||
cast(ArticleVariantHumanReadableId as varchar) + ' - ' + ArticleVariantDescription as av,
|
||||
case when [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark = '' or [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark is NULL then 'Please reach out to quality for the reason this was placed on hold as a remark was not entered during the blocking processs' else [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].Remark end as remark,
|
||||
cast(FORMAT(TotalAmountOfPieces, '###,###') as varchar) + ' / ' + cast(LoadingUnit as varchar) as peicesAndLoadingUnits,
|
||||
[test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].ProductionLotHumanReadableId as lotNumber,
|
||||
cast(IdGlobalBlockingDefectsGroup as varchar) + ' - ' + BD.Description as mainDefectGroup,
|
||||
cast(IdGlobalBlockingDefect as varchar) + ' - ' + MD.Description as mainDefect,
|
||||
sent=0,
|
||||
lot.MachineLocation as line,
|
||||
HumanReadableId
|
||||
FROM [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder] (nolock)
|
||||
|
||||
/*** Join 1.0 table to get correct id info to link ***/
|
||||
join
|
||||
[AlplaPROD_test1].[dbo].[T_BlockingOrders] (nolock) AS BO
|
||||
on [HumanReadableId] = BO.[IdBlockingOrder]
|
||||
|
||||
|
||||
/*** Get the main defect info ***/
|
||||
Inner join
|
||||
[AlplaPROD_test1].[dbo].[T_BlockingDefectsGroups] (nolock) as BD
|
||||
ON BO.IdMainDefectGroup = BD.IdBlockingDefectsGroup
|
||||
|
||||
INNER join
|
||||
[AlplaPROD_test1].[dbo].[T_BlockingDefects] as MD
|
||||
ON BO.IdMainDefect = MD.IdBlockingDefect
|
||||
/*** get lot info ***/
|
||||
|
||||
left join
|
||||
(SELECT [MachineLocation]
|
||||
,[MachineDescription]
|
||||
,[ProductionLotHumanReadableId]
|
||||
FROM [test1_AlplaPROD2.0_Reporting].[reporting_productionControlling].[ProducedLot]) as lot
|
||||
on [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].ProductionLotHumanReadableId = lot.ProductionLotHumanReadableId
|
||||
|
||||
where [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].[BlockingDate] between getdate() - 1 and getdate() + 1
|
||||
and [test1_AlplaPROD2.0_Reporting].[reporting_blocking].[BlockingOrder].BlockingTrigger = 1
|
||||
and HumanReadableId NOT IN ([sentBlockingOrders])
|
||||
`;
|
||||
|
||||
//add the blocking orders in.
|
||||
blockQuery = blockQuery.replaceAll(
|
||||
"[sentBlockingOrders]",
|
||||
notifyData.sentBlocking[0].sentBlockingOrders
|
||||
);
|
||||
|
||||
let blocking: any;
|
||||
try {
|
||||
blocking = await query(blockQuery, "Quality Blocking");
|
||||
//console.log(labels.length);
|
||||
// const now = Date.now()
|
||||
//console.log(blocking);
|
||||
// console.log(blocking[0].blockingNumber > data.prodID);
|
||||
if (
|
||||
blocking.length > 0 &&
|
||||
blocking[0].HumanReadableId > notifyData.notifiySettings.prodID
|
||||
) {
|
||||
//send the email :D
|
||||
const emailSetup = {
|
||||
emailTo: notifyData.emails,
|
||||
subject:
|
||||
blocking.length > 0
|
||||
? `Alert! New blocking orders.`
|
||||
: blocking[0].subject,
|
||||
template: "qualityBlocking",
|
||||
context: {
|
||||
items: blocking,
|
||||
},
|
||||
};
|
||||
|
||||
const sentEmail = await sendEmail(emailSetup);
|
||||
|
||||
if (!sentEmail.success) {
|
||||
createLog(
|
||||
"error",
|
||||
"nofity",
|
||||
"notify",
|
||||
"Failed to send email, will try again on next interval"
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// add the new blocking order to this
|
||||
const newBlockingOrders = blocking.map(
|
||||
(b: any) => b.HumanReadableId
|
||||
);
|
||||
|
||||
//console.log(newBlockingOrders);
|
||||
//console.log(sentBlocking[0].sentBlockingOrders);
|
||||
// Ensure no duplicates
|
||||
const uniqueOrders = Array.from(
|
||||
new Set([
|
||||
...notifyData.sentBlocking[0].sentBlockingOrders,
|
||||
...newBlockingOrders,
|
||||
])
|
||||
);
|
||||
|
||||
// Update sentBlockingOrders
|
||||
notifyData.sentBlocking[0].sentBlockingOrders = uniqueOrders;
|
||||
|
||||
//console.log(notifUpdate);
|
||||
|
||||
const { data, error } = await tryCatch(
|
||||
db
|
||||
.update(notifications)
|
||||
.set({
|
||||
lastRan: sql`NOW()`,
|
||||
notifiySettings: {
|
||||
...notifyData.notifiySettings,
|
||||
prodID: blocking[0].HumanReadableId,
|
||||
sentBlockingOrders: uniqueOrders,
|
||||
},
|
||||
})
|
||||
.where(eq(notifications.name, notifyData.name))
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`Error from running the blocking query: ${err}`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,118 @@
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { notifications } from "../../../../../database/schema/notifications.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { sendEmail } from "../sendMail.js";
|
||||
|
||||
export interface Labels {
|
||||
IdEtikettenHistorie?: number;
|
||||
}
|
||||
const notification = async (notifyData: any) => {
|
||||
/**
|
||||
* Pass the entire notification over
|
||||
*/
|
||||
createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
|
||||
|
||||
// validate if there are any emails.
|
||||
if (notifyData.emails === "") {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`There are no emails set for ${notifyData.name}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// well set a backup default time here
|
||||
let timeCheck = `DATEADD(SECOND, -30, getdate()) `;
|
||||
|
||||
// set the time of getting the label
|
||||
if (notifyData.timeType === "sec") {
|
||||
timeCheck = `DATEADD(SECOND, -${notifyData.checkTime}, getdate()) `;
|
||||
} else if (notifyData.timeType === "min") {
|
||||
timeCheck = `DATEADD(MINUTE, -${notifyData.checkTime}, getdate()) `;
|
||||
}
|
||||
|
||||
let reprintQuery = `
|
||||
SELECT
|
||||
IdArtikelvarianten as av,
|
||||
ArtikelVariantenBez as alias,
|
||||
LfdNr as runningNumber,
|
||||
CONVERT(VARCHAR, CAST(Add_Date AS DATETIME), 100) Add_Date,
|
||||
Add_User,
|
||||
CONVERT(VARCHAR, CAST(Upd_Date AS DATETIME), 100) Upd_Date,
|
||||
Upd_User,
|
||||
EtikettenDruckerBezeichnung as printer,
|
||||
AnzahlGedruckterKopien as totalPrinted
|
||||
FROM Alplaprod_test1.dbo.V_EtikettenGedruckt (nolock)
|
||||
where AnzahlGedruckterKopien > 2
|
||||
and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108)
|
||||
and Upd_Date > DATEADD(SECOND, -30, getdate())
|
||||
and VpkVorschriftBez not like '%$%'
|
||||
`;
|
||||
|
||||
//update the time check
|
||||
reprintQuery = reprintQuery.replaceAll(
|
||||
"DATEADD(SECOND, -30, getdate()) ",
|
||||
timeCheck
|
||||
);
|
||||
|
||||
//let labels: Labels[];
|
||||
|
||||
const { data: labels, error: labelError } = await tryCatch(
|
||||
query(reprintQuery, "Label Reprints")
|
||||
);
|
||||
|
||||
if (labels.length > 0) {
|
||||
//send the email :D
|
||||
const emailSetup = {
|
||||
emailTo: notifyData.emails,
|
||||
subject: "Alert! Label Reprinted",
|
||||
template: "reprintLabels",
|
||||
context: {
|
||||
items: labels,
|
||||
},
|
||||
};
|
||||
|
||||
const sentEmail = await sendEmail(emailSetup);
|
||||
|
||||
if (!sentEmail.success) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
"Failed to send email, will try again on next interval"
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// // update the last time we ran and the prod id
|
||||
// const notifUpdate = {
|
||||
// prodID: labels[0].IdEtikettenHistorie,
|
||||
// lastRan: nowDate(),
|
||||
// };
|
||||
|
||||
// update the last time ran
|
||||
const updateSettings = notifyData.notifiySettings;
|
||||
|
||||
const { data, error } = await tryCatch(
|
||||
db
|
||||
.update(notifications)
|
||||
.set({
|
||||
lastRan: sql`NOW()`,
|
||||
notifiySettings: {
|
||||
...updateSettings,
|
||||
prodID: labels[0].IdEtikettenHistorie,
|
||||
},
|
||||
})
|
||||
.where(eq(notifications.name, notifyData.name))
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
export default notification;
|
||||
@@ -0,0 +1,129 @@
|
||||
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
|
||||
|
||||
import { isWeekend } from "date-fns";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { sendEmail } from "../sendMail.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { notifications } from "../../../../../database/schema/notifications.js";
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
|
||||
export interface PPOO {
|
||||
IdPosition?: number;
|
||||
}
|
||||
export default async function reprintLabelMonitor(notifyData: any) {
|
||||
createLog("info", "notify", "notify", `monitoring ${notifyData.name}`);
|
||||
if (notifyData.emails === "") {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`There are no emails set for ${notifyData.notificationName}`
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// as this one goes to managers we want to not send on the weekends
|
||||
|
||||
const weekend = isWeekend(new Date(Date.now()));
|
||||
|
||||
if (weekend && notifyData.notifiySettings.weekend) {
|
||||
createLog(
|
||||
"info",
|
||||
"notify",
|
||||
"notify",
|
||||
`${notifyData.name} will not run on the weekends`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let noteQuery = `
|
||||
SELECT
|
||||
--[EinlagerungsDatummin] as lastMovingDate,
|
||||
round(VerfuegbareMengeVPKSum,2) as pallets
|
||||
,VerfuegbareMengeSum as total
|
||||
,round([GesperrteMengeVpkSum],2) as held
|
||||
,round([GesperrteMengeSum],2) as heldQty
|
||||
,[IdArtikelVarianten] as av
|
||||
,[IdProdBereich] as pfcID
|
||||
,[ArtikelVariantenBez] as articleDescription
|
||||
,[ArtikelVariantenAlias] as articleDescriptionAlias
|
||||
,[LagerAbteilungKurzBez] as location
|
||||
,[Lfdnr] as runningNumber
|
||||
,[Produktionslos] as lot
|
||||
,[ProduktionsDatumMin] as productionDate
|
||||
,IdPosition
|
||||
FROM [AlplaPROD_test1].[dbo].[V_LagerPositionenBarcodes] (nolock)
|
||||
|
||||
where idlagerabteilung in ([locations]) and [ProduktionsDatumMin] < DATEadd( Hour, -[timeCheck], getdate())
|
||||
|
||||
order by [ProduktionsDatumMin] asc
|
||||
`;
|
||||
|
||||
//update the time check
|
||||
noteQuery = noteQuery
|
||||
.replaceAll("[timeCheck]", notifyData.checkTime)
|
||||
.replaceAll("[locations]", notifyData.notifiySettings.locations);
|
||||
|
||||
let stage: PPOO[];
|
||||
try {
|
||||
stage = await query(noteQuery, "Staging checks");
|
||||
//console.log(labels.length);
|
||||
// const now = Date.now()
|
||||
if (stage.length > 0) {
|
||||
//send the email :D
|
||||
|
||||
// update the count with the result
|
||||
|
||||
const emailSetup = {
|
||||
emailTo: notifyData.emails,
|
||||
subject: `Alert! Pallets in staging greater than ${notifyData.checkTime} ${notifyData.timeType}`,
|
||||
template: "stagingCheck",
|
||||
context: {
|
||||
items: stage,
|
||||
count: stage.length,
|
||||
checkTime: notifyData.checkTime,
|
||||
timeCheck: notifyData.timeType,
|
||||
},
|
||||
};
|
||||
|
||||
const sentEmail = await sendEmail(emailSetup);
|
||||
|
||||
if (!sentEmail.success) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
"Failed to send email, will try again on next interval"
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// update the last time we ran and the prod id
|
||||
let updateSettings = notifyData.notifiySettings;
|
||||
const { data, error } = await tryCatch(
|
||||
db
|
||||
.update(notifications)
|
||||
.set({
|
||||
lastRan: sql`NOW()`,
|
||||
notifiySettings: {
|
||||
...updateSettings,
|
||||
count: stage.length,
|
||||
},
|
||||
})
|
||||
.where(eq(notifications.name, notifyData.name))
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
} catch (err) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`Error from running the Label Reprints query: ${err}`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,201 @@
|
||||
/**
 * MercuryGate "ImportWeb" request template for the TI integration.
 * Square-bracket tokens ([requestID], [loadNumber], [shipNumber], ...) are
 * placeholders replaced before sending.
 *
 * BUG FIX: removed a stray literal "6" that sat between <EquipmentList/> and
 * <Dates> inside the Shipment element — invalid content in the XML payload.
 */
export let xmlPayloadTI = `
<service-request>
  <service-id>ImportWeb</service-id>
  <request-id>[requestID]</request-id>
  <data>
    <WebImport>
      [WebImportHeader]
      <WebImportFile>
        <MercuryGate>
          <Header>
            <SenderID/>
            <ReceiverID/>
            <DocTypeID>MasterBillOfLading</DocTypeID>
            <DocCount>1</DocCount>
          </Header>
          <Load action="UpdateOrAdd">
            <Enterprise name="" customerAcctNum="[customerAccountNum]"/>
            <AssignedTo/>
            <ReferenceNumbers>
              <ReferenceNumber type="Load Number" isPrimary="true">[loadNumber]</ReferenceNumber>
            </ReferenceNumbers>
            <Payment>
              <Method>Prepaid</Method>
              <BillTo thirdParty="False">
                <Address Type="BillTo" isResidential="False">
                  <Alias/>
                  <Name>ALPLA</Name>
                  <AddrLine1>CO TRANSPORTATION INSIGHT</AddrLine1>
                  <AddrLine2>PO BOX 23000</AddrLine2>
                  <City>HICKORY</City>
                  <StateProvince>NC</StateProvince>
                  <PostalCode>28603</PostalCode>
                  <CountryCode>USA</CountryCode>
                  <Contacts/>
                </Address>
              </BillTo>
            </Payment>
            <PriceSheets>
              <PriceSheet type="Carrier" isSelected="false"> // get this from the price sheet
                <ContractId/>
                <SCAC/>
                <Mode/>
              </PriceSheet>
            </PriceSheets>
            <Plan>
              <Events count="2">
                <Event type="Pickup" sequenceNum="1">
                  <Dates>
                    <Date type="earliest">[loadingDate]</Date>
                    <Date type="latest">[deliveryDate]</Date>
                  </Dates>
                  <Address type="" isResidential="" isPrimary="false">
                    <LocationCode/>
                    <Name>[plantName]</Name>
                    <AddrLine1>[plantStreetAddress]</AddrLine1>
                    <AddrLine2/>
                    <City>[plantCity]</City>
                    <StateProvince>[plantState]</StateProvince>
                    <PostalCode>[plantZipCode]</PostalCode>
                    <CountryCode>USA</CountryCode>
                    <Contacts>
                      <Contact type="">
                        <Name/>
                        <ContactMethods>
                          <ContactMethod sequenceNum="1" type="phone">[contactNum]</ContactMethod>
                          <ContactMethod sequenceNum="1" type="email">[contactEmail]</ContactMethod>
                        </ContactMethods>
                      </Contact>
                    </Contacts>
                  </Address>
                  <Shipments>
                    <ReferenceNumbers>
                      <ReferenceNumber type="Shipment Number" isPrimary="true">[shipNumber]</ReferenceNumber>
                    </ReferenceNumbers>
                  </Shipments>
                </Event>
                <Event type="Drop" sequenceNum="2">
                  <Dates>
                    <Date type="earliest">[loadingDate]</Date>
                    <Date type="latest">[deliveryDate]</Date>
                  </Dates>
                  <Address type="" isResidential="" isPrimary="false">
                    <LocationCode/>
                    <Name>[customerName]</Name>
                    <AddrLine1>[customerStreetAddress]</AddrLine1>
                    <AddrLine2/>
                    <City>[customerCity]</City>
                    <StateProvince>[customerState]</StateProvince>
                    <PostalCode>[customerZip]</PostalCode>
                    <CountryCode>USA</CountryCode>
                    <Contacts>
                      <Contact type="">
                        <Name/>
                        <ContactMethods>
                          <ContactMethod sequenceNum="1" type="phone">800-555-1122</ContactMethod>
                        </ContactMethods>
                      </Contact>
                    </Contacts>
                  </Address>
                  <Shipments>
                    <ReferenceNumbers>
                      <ReferenceNumber type="Shipment Number" isPrimary="true">[shipNumber]</ReferenceNumber>
                    </ReferenceNumbers>
                  </Shipments>
                </Event>
              </Events>
            </Plan>
            <Shipments>
              <Shipment type="Regular" action="UpdateOrAdd">
                <Status>Pending</Status>
                <Enterprise name="" customerAcctNum="[customerAccountNum]"/>
                <ReferenceNumbers>
                  <ReferenceNumber type="Shipment Number" isPrimary="true">[shipNumber]</ReferenceNumber>
                  <ReferenceNumber type="PO Number" isPrimary="false">[customerPO]</ReferenceNumber>
                  [multieReleaseNumber]
                  <ReferenceNumber type="Store Number" isPrimary="false">[glCoding]</ReferenceNumber>
                  <ReferenceNumber type="Profit Center" isPrimary="false">[pfc]</ReferenceNumber>
                </ReferenceNumbers>
                <Services/>
                <EquipmentList/>
                <Dates>
                  <Pickup>
                    <Date type="earliest">[loadingDate]</Date>
                    <Date type="latest">[loadingDate]</Date>
                  </Pickup>
                  <Drop>
                    <Date type="earliest">[deliveryDate]</Date>
                    <Date type="latest">[deliveryDate]</Date>
                  </Drop>
                </Dates>
                <PriceSheets>
                  <PriceSheet type="Carrier" isSelected="false">
                    <ContractId/>
                    <SCAC/>
                    <Mode/>
                  </PriceSheet>
                </PriceSheets>
                <Shipper>
                  <Address type="" isResidential="" isPrimary="false">
                    <LocationCode/>
                    <Name>[plantName]</Name>
                    <AddrLine1>[plantStreetAddress]</AddrLine1>
                    <AddrLine2/>
                    <City>[plantCity]</City>
                    <StateProvince>[plantState]</StateProvince>
                    <PostalCode>[plantZipCode]</PostalCode>
                    <CountryCode>USA</CountryCode>
                    <Contacts>
                      <Contact type="">
                        <Name/>
                        <ContactMethods>
                          <ContactMethod sequenceNum="1" type="phone">[contactNum]</ContactMethod>
                        </ContactMethods>
                      </Contact>
                    </Contacts>
                  </Address>
                </Shipper>
                <Consignee>
                  <Address type="" isResidential="" isPrimary="false">
                    <LocationCode/>
                    <Name>[customer]</Name>
                    <AddrLine1>[customerStreetAddress]</AddrLine1>
                    <AddrLine2/>
                    <City>[customerCity]</City>
                    <StateProvince>[customerState]</StateProvince>
                    <PostalCode>[customerZip]</PostalCode>
                    <CountryCode>USA</CountryCode>
                    <Contacts />
                  </Address>
                </Consignee>
                <ItemGroups>
                  [items]
                </ItemGroups>
                <Payment>
                  <Method>Prepaid</Method>
                  <BillTo thirdParty="False">
                    <Address Type="BillTo" isResidential="False">
                      <Alias/>
                      <Name>ALPLA</Name>
                      <AddrLine1>CO TRANSPORTATION INSIGHT</AddrLine1>
                      <AddrLine2>PO BOX 23000</AddrLine2>
                      <City>HICKORY</City>
                      <StateProvince>NC</StateProvince>
                      <PostalCode>28603</PostalCode>
                      <CountryCode>USA</CountryCode>
                      <Contacts/>
                    </Address>
                  </BillTo>
                </Payment>
              </Shipment>
            </Shipments>
          </Load>
        </MercuryGate>
      </WebImportFile>
    </WebImport>
  </data>
</service-request>
`;
|
||||
@@ -0,0 +1,433 @@
|
||||
import { xmlPayloadTI } from "./tiFullFlow/tiXmlPayload.js";
|
||||
import axios from "axios";
|
||||
import querystring from "querystring";
|
||||
import { getOrderToSend } from "../../../sqlServer/querys/notifications/ti/getOrderToSend.js";
|
||||
import { getHeaders } from "../../../sqlServer/querys/notifications/ti/getHeaders.js";
|
||||
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
|
||||
import { db } from "../../../../../database/dbclient.js";
|
||||
import { settings } from "../../../../../database/schema/settings.js";
|
||||
import { serverData } from "../../../../../database/schema/serverData.js";
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { notifications } from "../../../../../database/schema/notifications.js";
|
||||
import { query } from "../../../sqlServer/prodSqlServer.js";
|
||||
import { createLog } from "../../../logger/logger.js";
|
||||
import { freightClass } from "../../../../globalUtils/freightClass.js";
|
||||
import { delay } from "../../../../globalUtils/delay.js";
|
||||
|
||||
const dateCorrection = (newDate: any) => {
|
||||
return new Date(newDate)
|
||||
.toLocaleString("en-US", {
|
||||
timeZone: "UTC",
|
||||
year: "numeric",
|
||||
month: "2-digit",
|
||||
day: "2-digit",
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
second: "2-digit",
|
||||
hourCycle: "h23", // Ensures 24-hour format
|
||||
})
|
||||
.replace(",", "");
|
||||
};
|
||||
|
||||
/**
 * Sends ONE pending release to TI (MercuryGate).
 *
 * Flow: read plant settings/token from the local DB, pull the next open
 * release header + its lines from the production SQL Server, render them into
 * the TI web-import XML payload, POST it to the MercuryGate remote service,
 * then mark the release as processed in the notifications row.
 *
 * Returns { success, code, message } — code 1 means "nothing left to send"
 * (the caller runTiImport uses this to stop looping), code 0 means one order
 * was processed. Early error paths return undefined.
 */
const tiImport = async () => {
  //await initializePool();

  // get the plant token
  const { data: plantData, error: plantError } = await tryCatch(
    db.select().from(settings)
  );
  //await initializePool();
  // NOTE(review): silent bail-out (returns undefined) — callers must tolerate it.
  if (plantError) return;
  const plantToken = plantData?.filter((n) => n.name === "plantToken");

  // Plant/server configuration row keyed by the plant token.
  const { data: plantInfo, error: plantEr } = await tryCatch(
    db
      .select()
      .from(serverData)
      .where(eq(serverData.plantToken, plantToken[0].value))
  );

  // parsing posting window
  // NOTE(review): plantEr is never checked before the non-null assertion below.
  const plantI = plantInfo!;
  //const postTime = JSON.parse(plantI[0]?.tiPostTime!);

  // order notifications (state for this integration lives in this row)
  const { data: notificationSet, error: notificationSettingsErr } =
    await tryCatch(
      db
        .select()
        .from(notifications)
        .where(eq(notifications.name, "tiIntergration"))
    );
  if (notificationSettingsErr) return;

  const notiSet: any = notificationSet;
  // creds
  // NOTE(review): hard-coded credentials committed to source — move to
  // env vars / settings before production use.
  const userid = "ALPLAWSTEST";
  const password = "oe39U1LuLX9ZdY0XKobG";

  // const requestID = `ALPLAPBTEST1`; // production will be alpla01-dateTime - this will be the time it was sent over.
  const requestUser = "ALPLAWSTEST"; // if alplaprod_rs -- confirm we can use a user name vs the AlplapIMPORT // needs to stay the same as provided

  const customerAccountNum = plantI[0].customerTiAcc as string; // ti account number

  // if we don't get anything here we want to make sure we add it in

  // get current releases not in the already-sent orders.
  // getHeaders is a SQL template; placeholders are patched in below.
  // NOTE(review): notiSet is the raw select() result (an array); accessing
  // .notifiySettings directly on it (instead of notiSet[0]) looks wrong —
  // confirm the actual shape before changing.
  let orders = getHeaders;
  orders = orders
    .replaceAll("test1", plantToken[0].value)
    .replaceAll("[from]", notiSet?.notifiySettings.start)
    .replaceAll("[to]", notiSet?.notifiySettings.end)
    .replaceAll(
      "[exclude]",
      notiSet.notifiySettings.processed
        .map((num: any) => `'${num}'`)
        .join(", ")
    );

  //console.log(orders);
  let headerPending = [];
  try {
    headerPending = await query(orders, "Ti get open headers");
  } catch (error) {
    console.log(error);
  }

  // Nothing pending: report and signal the caller to stop (code 1).
  if (headerPending.length === 0) {
    createLog(
      "info",
      "notification",
      "notify",
      "There are no pending orders to be sent over to ti."
    );
    return {
      success: true,
      code: 1,
      message: "There are no pending orders to be sent over to ti.",
    };
  }

  createLog(
    "info",
    "notification",
    "notify",
    `There are a total of ${headerPending.length} to send over`
  );
  // update query to have the correct plant token; only the FIRST pending
  // release is processed per call — the caller loops for the rest.
  let orderToSend = getOrderToSend;
  orderToSend = orderToSend
    .replaceAll("test1", plantToken[0].value)
    .replaceAll("[releaseToProcess]", `'${headerPending[0].releaseNumber}'`)
    .replaceAll("[from]", notiSet.notifiySettings.start)
    .replaceAll("[to]", notiSet.notifiySettings.end);

  // console.log(orderToSend);
  let records = [];
  try {
    records = await query(orderToSend, "Ti send order");
  } catch (error) {
    console.log(error);
  }
  //console.log(headerPending.length);

  // update the header — request id is "<release>-<plantToken>"
  let webHeader = `
    <request-id>[requestID]</request-id>
    <data>
    <WebImport>
    <WebImportHeader>
    <FileName>[requestID].XML</FileName>
    <Type>SOTransportLoader</Type>
    <UserName>[requestUser]</UserName>
    </WebImportHeader>
  `;

  webHeader = webHeader.replaceAll(
    "[requestID]",
    `${records[0].releaseNumber}-${plantToken[0].value}`
  );
  webHeader = webHeader.replaceAll("[requestUser]", requestUser);

  // update the special instructions section
  const otherSettings = plantI[0]?.otherSettings as {
    specialInstructions: string;
    active: boolean;
  }[];

  const specialInfo = otherSettings[0].specialInstructions.replaceAll(
    "[header]",
    records[0].Header
  );
  // this part will link into the <ItemGroups></ItemGroups>
  let itemGroups = "";

  // One <ItemGroup> per record line. Dimensions are divided by 25.4 —
  // presumably mm -> inches (uom="IN"); TODO confirm source units.
  for (let i = 0; i < records.length; i++) {
    let newItem = `
    <ItemGroup id="" isShipUnit="false" isHandlingUnit="false" sequence="${
      i + 1
    }">
    <ContainedBy id=""/>
    <LineItem lineNumber="${i + 1}"/>
    <Dimensions>
    <Dimension type="Length" uom="IN">${(
      records[i].pkgLengh / 25.4
    ).toFixed(2)}</Dimension>
    <Dimension type="Width" uom="IN">${(
      records[i].pkgWidth / 25.4
    ).toFixed(2)}</Dimension>
    <Dimension type="Height" uom="IN">${Math.round(
      records[i].pkgHeight / 25.4
    ).toFixed(2)}</Dimension>
    </Dimensions>
    <Description>${`av ${records[i].article} ${records[i].articleAlias}`}</Description>
    <FreightClasses>
    <FreightClass type="">${freightClass(
      records[i].pkgWeight,
      records[i].pkgLengh,
      records[i].pkgWidth,
      records[i].pkgHeight
    )}</FreightClass>
    </FreightClasses>
    <Commodity/>
    <NmfcCode/>
    <HazardousMaterial>false</HazardousMaterial>
    <HazMatDetail/>
    <Weights>
    <Weight type="actual" uom="KG">${
      records[i].pkgWeight * records[i].Pallets
    }</Weight>
    </Weights>
    <Quantities>
    <Quantity type="actual" uom="pallet">${
      records[i].Pallets
    }</Quantity>
    </Quantities>
    </ItemGroup>
    `;

    itemGroups += newItem;
  }

  // add the full amount of pallets sending over
  let fullPalToSend = records.reduce(
    (acc: any, o: any) => acc + o.Pallets,
    0
  );

  // rebuild the xml from the master template by patching placeholders
  let payload = xmlPayloadTI;
  payload = payload
    .replaceAll(`[WebImportHeader]`, webHeader)
    .replaceAll(`[items]`, itemGroups)
    .replaceAll(`[customerAccountNum]`, customerAccountNum)
    .replaceAll("[fullTotalPal]", fullPalToSend);

  // update the main release — ship and load numbers both use the release number
  //[loadNumber],[shipNumber]
  payload = payload.replaceAll(`[shipNumber]`, records[0].releaseNumber);
  payload = payload.replaceAll(`[loadNumber]`, records[0].releaseNumber);

  // do the multi release if needed
  // <ReferenceNumber type="Release Number" isPrimary="false">[multieReleaseNumber]</ReferenceNumber>

  let multiRelease = ``;
  if (records.length > 0) {
    for (let i = 0; i < records.length; i++) {
      const newRelease = `
      <ReferenceNumber type="Release Number" isPrimary="false">${records[i].releaseNumber}</ReferenceNumber>`;
      multiRelease += newRelease;
    }

    payload = payload.replaceAll("[multieReleaseNumber]", multiRelease);
  } else {
    payload = payload.replaceAll("[multieReleaseNumber]", "");
  }

  //update the delivery section (dates normalized to UTC by dateCorrection)
  payload = payload.replaceAll(
    "[loadingDate]",
    dateCorrection(records[0].LoadingDate)
  );

  payload = payload.replaceAll(
    "[deliveryDate]",
    dateCorrection(records[0].DeliveryDate)
  );

  // shipping hours
  //<Date type="earliest">[shippingHoursEarly]</Date>
  //<Date type="latest">[shippingHoursLate]</Date>

  // update the shipping hours

  // NOTE(review): `now` is unused — candidate for removal.
  const now = new Date();
  const formattedDate = records[0].LoadingDate.toLocaleDateString("en-US", {
    month: "2-digit",
    day: "2-digit",
    year: "numeric",
  });

  // shippingHours is stored as JSON text; [{ early, late }, ...]
  const shippingHours = JSON.parse(plantI[0]?.shippingHours!);
  //console.log(shippingHours);

  payload = payload
    .replaceAll(
      "[shippingHoursEarly]",
      `${formattedDate} ${shippingHours[0].early}`
    )
    .replaceAll(
      "[shippingHoursLate]",
      `${formattedDate} ${shippingHours[0].late}`
    );

  // plant (origin) info from the serverData row
  payload = payload
    .replaceAll("[plantName]", `Alpla ${plantI[0]?.sName!}`)
    .replaceAll("[plantStreetAddress]", plantI[0]?.streetAddress!)
    .replaceAll("[plantCity]", plantI[0]?.cityState!.split(",")[0])
    .replaceAll("[plantState]", plantI[0]?.cityState!.split(",")[1])
    .replaceAll("[plantZipCode]", plantI[0]?.zipcode!)
    .replaceAll("[contactNum]", plantI[0]?.contactPhone!)
    .replaceAll("[contactEmail]", plantI[0]?.contactEmail!)

    // customer (destination) info from the first record
    .replaceAll("[customerName]", records[0].addressAlias)
    .replaceAll("[customerStreetAddress]", records[0].streetAddress)
    .replaceAll("[customerCity]", records[0].city.split(",")[0])
    .replaceAll("[customerState]", records[0].city.split(",")[1])
    .replaceAll("[customerZip]", records[0].zipCode)
    .replaceAll("[customerPO]", records[0].Header)
    // GL coding: preform/metalCage article types map to 31, otherwise the
    // plant's Great Plains code.
    .replaceAll(
      "[glCoding]",
      `52410-${
        records[0].artileType.toLowerCase() === "preform" ||
        records[0].artileType.toLowerCase() === "metalCage"
          ? 31
          : plantI[0].greatPlainsPlantCode
      }`
    ) // {"52410 - " + (artileType.toLowerCase() === "preform" || artileType.toLowerCase() === "metalCage" ? 31: plantInfo[0].greatPlainsPlantCode)}
    // PFC: same article-type split, 40 vs the record's cost center.
    .replaceAll(
      "[pfc]",
      `${
        records[0].artileType.toLowerCase() === "preform" ||
        records[0].artileType.toLowerCase() === "metalCage"
          ? 40
          : records[0].costCenter
      }`
    );

  // special instructions — only patched when configured; otherwise the
  // [specialInstructions] placeholder is left in the payload.
  if (otherSettings[0].specialInstructions.length != 0) {
    payload = payload.replaceAll("[specialInstructions]", specialInfo);
  }

  // update the carrier info if any is needed.

  // check the address has a real carrier on it and change to true and put the scac code in
  // NOTE(review): hasCarrier is unused — the priceSheet below derives the
  // flag from records[0].remark directly.
  const hasCarrier = true;

  console.log(
    `Checking if ${records[0].addressAlias} has scac: ${
      records[0].remark.split(",")[0] ? "there was one" : "no scac"
    }`
  );

  // The SCAC is expected inside remark as "<label>:<scac>,..." — the code
  // takes the first comma segment and the part after the colon.
  const priceSheet = `
  <PriceSheets>
  <PriceSheet type="Carrier" isSelected="${
    records[0].remark.split(",")[0] ? "true" : "false"
  }">
  <ContractId/>
  ${
    records[0].remark.split(",")[0]
      ? `<SCAC>${records[0].remark
          .split(",")[0]
          .split(":")[1]
          .toUpperCase()}</SCAC>`
      : `<SCAC/>`
  }
  <Mode/>
  </PriceSheet>
  </PriceSheets>
  `;

  payload = payload.replaceAll("[priceSheet]", priceSheet);
  // console.log(payload);
  //await closePool();

  //put the xml into a form (the remote service expects urlencoded fields)
  const formBody = querystring.stringify({
    userid,
    password,
    request: payload,
  });
  // NOTE(review): fire-and-forget POST — the "processed" bookkeeping below
  // runs even if this request fails; confirm that is intended.
  axios
    .post(
      "https://t-insightws.mercurygate.net/MercuryGate/common/remoteService.jsp",
      formBody,
      {
        headers: {
          "Content-Type": "application/x-www-form-urlencoded",
        },
      }
    )
    .then((response) => {
      //console.log(response.data)
      console.log("Data was sent over to TI");
    })
    .catch((error) => console.error(error));

  // console.log(payload);

  // the order is done so we want to update the processed.

  // add the new processed order to this (dedup by release number)
  let notiSettingArray = notiSet.notifiySettings;

  if (
    !notiSettingArray[0].processed.includes(headerPending[0].releaseNumber)
  ) {
    notiSettingArray[0].processed.push(headerPending[0].releaseNumber);
  }

  // NOTE(review): notiSettingArray is indexed like an array above but spread
  // into an object literal here — confirm the persisted shape is what the
  // readers at the top of this function expect.
  const { data, error } = await tryCatch(
    db
      .update(notifications)
      .set({
        lastRan: sql`NOW()`,
        notifiySettings: {
          ...notiSettingArray,
          prodID: 1,
        },
      })
      .where(eq(notifications.name, "tiIntergration"))
  );
  createLog("info", "ti", "notify", "done with this order");
  return { success: true, code: 0, message: "done with this order" };
};
|
||||
|
||||
// add a running check so we cant flag it twice
|
||||
export let tiExportRunning = false;
|
||||
export const runTiImport = async () => {
|
||||
let finished = false;
|
||||
let test: any;
|
||||
tiExportRunning = true;
|
||||
do {
|
||||
createLog("info", "ti", "notify", "processing new data");
|
||||
// code block to be executed
|
||||
test = await tiImport();
|
||||
createLog(
|
||||
"info",
|
||||
"ti",
|
||||
"notify",
|
||||
`Still more to process? ${test.code === 1 ? "No" : "Yes"}`
|
||||
);
|
||||
if (test.code === 1) {
|
||||
finished = true;
|
||||
}
|
||||
await delay(1000 * 5);
|
||||
} while (!finished);
|
||||
tiExportRunning = false;
|
||||
};
|
||||
|
||||
export default tiImport;
|
||||
149
server/services/notifications/controller/sendMail.ts
Normal file
149
server/services/notifications/controller/sendMail.ts
Normal file
@@ -0,0 +1,149 @@
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
import { db } from "../../../../database/dbclient.js";
|
||||
import { settings } from "../../../../database/schema/settings.js";
|
||||
import nodemailer from "nodemailer";
|
||||
import type { Transporter } from "nodemailer";
|
||||
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
|
||||
import type Mail from "nodemailer/lib/mailer/index.js";
|
||||
import type { Address } from "nodemailer/lib/mailer/index.js";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
import hbs from "nodemailer-express-handlebars";
|
||||
import { promisify } from "util";
|
||||
import { createLog } from "../../logger/logger.js";
|
||||
import { installed } from "../../../index.js";
|
||||
|
||||
// nodemailer Mail.Options extended with the two fields the
// nodemailer-express-handlebars "compile" plugin reads: the template
// (partial) name and the values substituted into it.
interface HandlebarsMailOptions extends Mail.Options {
  template: string;
  context: Record<string, unknown>; // Use a generic object for context
}
|
||||
|
||||
// Intended shape of the payload handed to sendEmail() (which currently
// takes `any`); appears unused in the visible code.
// NOTE(review): `context: []` types context as an EMPTY TUPLE — a
// Record<string, unknown> (as in HandlebarsMailOptions) was probably
// intended. Confirm no caller relies on this before changing.
interface EmailData {
  email: string;
  subject: string;
  template: string;
  context: [];
}
|
||||
|
||||
/**
 * Sends one templated email.
 *
 * @param data expected { email, subject, template, context } — template is a
 *             Handlebars partial name under ../utils/views/ and context fills
 *             its placeholders.
 * @returns { success, message, ... } describing the outcome, or undefined
 *          when the server has not completed installation.
 *
 * Transport selection: a "localhost" server with EMAIL_USER/EMAIL_PASSWORD
 * env vars sends through Gmail; anything else uses the plant SMTP relay on
 * port 25 (with special-case hosts for the test tokens and usiow2).
 */
export const sendEmail = async (data: any): Promise<any> => {
  // Refuse to send anything until the install flow has completed.
  if (!installed) {
    createLog("error", "notify", "notify", "server not installed.");
    return;
  }
  let transporter: Transporter;
  let fromEmail: string | Address;
  // Settings hold both the "server" flag and the plant token used below.
  const { data: settingData, error: settingError } = await tryCatch(
    db.select().from(settings)
  );

  if (settingError) {
    return {
      success: false,
      message: "There was an error getting the settings.",
      settingError,
    };
  }
  // pick out the "server" setting row
  const server = settingData.filter((n) => n.name === "server");

  // Local development: Gmail with env-var credentials.
  if (
    server[0].value === "localhost" &&
    process.env.EMAIL_USER &&
    process.env.EMAIL_PASSWORD
  ) {
    transporter = nodemailer.createTransport({
      service: "gmail",
      auth: {
        user: process.env.EMAIL_USER,
        pass: process.env.EMAIL_PASSWORD,
      },
      //debug: true,
    });

    // update the from email
    fromEmail = process.env.EMAIL_USER;
  } else {
    // Production: derive the SMTP relay host from the plant token.
    const plantToken = settingData.filter((s) => s.name === "plantToken");

    let host = `${plantToken[0].value}-smtp.alpla.net`;

    // Test tokens share one relay; usiow2 rides on usiow1's relay.
    const testServers = ["test1", "test2", "test3"];

    if (testServers.includes(plantToken[0].value)) {
      host = "USMCD1-smtp.alpla.net";
    }

    if (plantToken[0].value === "usiow2") {
      host = "USIOW1-smtp.alpla.net";
    }

    // NOTE(review): for SMTP transports rejectUnauthorized normally lives
    // under a tls: {} object — at the top level it is likely ignored; verify.
    transporter = nodemailer.createTransport({
      host: host,
      port: 25,
      rejectUnauthorized: false,
      //secure: false,
      // auth: {
      //   user: "alplaprod",
      //   pass: "obelix",
      // },
      debug: true,
    } as SMTPTransport.Options);

    // update the from email
    fromEmail = `noreply@alpla.com`;
  }

  // Resolve the Handlebars template directory relative to this module.
  const viewPath = path.resolve(
    path.dirname(fileURLToPath(import.meta.url)),
    "../utils/views/"
  );

  const handlebarOptions = {
    viewEngine: {
      extname: ".hbs",
      //layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory
      defaultLayout: "", // no layout wrapper — templates render standalone
      partialsDir: viewPath,
    },
    viewPath: viewPath,
    extName: ".hbs", // File extension for Handlebars templates
  };

  // Render outgoing messages through Handlebars before sending.
  transporter.use("compile", hbs(handlebarOptions));

  const mailOptions: HandlebarsMailOptions = {
    from: fromEmail,
    to: data.email,
    subject: data.subject,
    //text: "You will have a reset token here and only have 30min to click the link before it expires.",
    //html: emailTemplate("BlakesTest", "This is an example with css"),
    template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs')
    context: data.context,
  };

  // promisify the callback API; bind pins `this` to the transporter so
  // sendMail still sees its instance state.
  const sendMailPromise = promisify(transporter.sendMail).bind(transporter);

  try {
    // Send email and await the result
    const info = await sendMailPromise(mailOptions);
    createLog(
      "info",
      "notification",
      "system",
      `Email was sent to: ${data.email}`
    );
    return { success: true, message: "Email sent.", data: info };
  } catch (err) {
    console.log(err);
    createLog(
      "error",
      "notification",
      "system",
      `Error sending Email: ${JSON.stringify(err)}`
    );
    return { success: false, message: "Error sending email.", error: err };
  }
};
|
||||
61
server/services/notifications/notifyService.ts
Normal file
61
server/services/notifications/notifyService.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { OpenAPIHono } from "@hono/zod-openapi";
|
||||
|
||||
import sendemail from "./routes/sendMail.js";
|
||||
import { tryCatch } from "../../globalUtils/tryCatch.js";
|
||||
import { db } from "../../../database/dbclient.js";
|
||||
|
||||
import { notifications } from "../../../database/schema/notifications.js";
|
||||
import { createLog } from "../logger/logger.js";
|
||||
import { note, notificationCreate } from "./utils/masterNotifications.js";
|
||||
import { startNotificationMonitor } from "./utils/processNotifications.js";
|
||||
import notifyStats from "./routes/getActiveNotifications.js";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const routes = [sendemail, notifyStats] as const;
|
||||
|
||||
const appRoutes = routes.forEach((route) => {
|
||||
app.route("/notify", route);
|
||||
});
|
||||
|
||||
app.all("/notify/*", (c) => {
|
||||
return c.json({
|
||||
success: false,
|
||||
message: "you have encounted a notication route that dose not exist.",
|
||||
});
|
||||
});
|
||||
|
||||
// check if the mastNotications is changed compared to the db and add if needed.
|
||||
const { data: notes, error: notesError } = await tryCatch(
|
||||
db.select().from(notifications)
|
||||
);
|
||||
|
||||
if (notesError) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`There was an error getting the notifications: ${JSON.stringify(
|
||||
notesError
|
||||
)}`
|
||||
);
|
||||
}
|
||||
|
||||
if (note.length != notes?.length) {
|
||||
notificationCreate();
|
||||
createLog("info", "notify", "notify", `New notifcations being added.`);
|
||||
setTimeout(() => {
|
||||
startNotificationMonitor();
|
||||
}, 5 * 1000);
|
||||
} else {
|
||||
createLog(
|
||||
"info",
|
||||
"notify",
|
||||
"notify",
|
||||
`There are know new notifcations. no need to run the update. reminder all changes happen per server.`
|
||||
);
|
||||
setTimeout(() => {
|
||||
startNotificationMonitor();
|
||||
}, 5 * 1000);
|
||||
}
|
||||
export default app;
|
||||
@@ -0,0 +1,29 @@
|
||||
// an external way to creating logs
|
||||
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
|
||||
import { responses } from "../../../globalUtils/routeDefs/responses.js";
|
||||
import { getAllJobs } from "../utils/processNotifications.js";
|
||||
|
||||
const app = new OpenAPIHono({ strict: false });
|
||||
|
||||
app.openapi(
|
||||
createRoute({
|
||||
tags: ["server"],
|
||||
summary: "Returns current active notifications.",
|
||||
method: "get",
|
||||
path: "/activenotifications",
|
||||
//middleware: authMiddleware,
|
||||
responses: responses(),
|
||||
}),
|
||||
async (c) => {
|
||||
const jobs = getAllJobs();
|
||||
return c.json({
|
||||
success: true,
|
||||
message:
|
||||
jobs.length === 0
|
||||
? "There are no active Notifications Currently."
|
||||
: "Current Active notifications",
|
||||
data: jobs,
|
||||
});
|
||||
}
|
||||
);
|
||||
export default app;
|
||||
22
server/services/notifications/routes/manualTiggerTi.ts
Normal file
22
server/services/notifications/routes/manualTiggerTi.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
// import {Router} from "express";
|
||||
// import {tiExportRunning, runTiImport} from "../../notification/notification/tiFullFlow/tiImports.js";
|
||||
|
||||
// const router = Router();
|
||||
|
||||
// router.get("/tiTrigger", async (req, res): Promise<void> => {
|
||||
// if (tiExportRunning) {
|
||||
// res.status(200).json({
|
||||
// success: false,
|
||||
//             message: "There is already a current session of the Export running please try again later.",
|
||||
// });
|
||||
// }
|
||||
|
||||
// // trigger the import
|
||||
// runTiImport();
|
||||
|
||||
// res.status(200).json({
|
||||
// success: true,
|
||||
// message: "The Ti Export has been manually started and will continue to run in the background.",
|
||||
// });
|
||||
// });
|
||||
// export default router;
|
||||
73
server/services/notifications/routes/sendMail.ts
Normal file
73
server/services/notifications/routes/sendMail.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
// an external way to creating logs
|
||||
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
|
||||
import { responses } from "../../../globalUtils/routeDefs/responses.js";
|
||||
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
|
||||
import { sendEmail } from "../controller/sendMail.js";
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
|
||||
const app = new OpenAPIHono({ strict: false });
|
||||
|
||||
const EmailSchema = z
|
||||
.object({
|
||||
email: z.string().email().openapi({ example: "smith@example.come" }),
|
||||
subject: z.string().openapi({ example: "Welcome to LST" }),
|
||||
template: z.string().openapi({ example: "exampleTemplate" }),
|
||||
context: z
|
||||
.object({
|
||||
name: z.string().optional(),
|
||||
score: z.string().optional(),
|
||||
})
|
||||
.optional()
|
||||
.openapi({}),
|
||||
})
|
||||
.openapi("User");
|
||||
app.openapi(
|
||||
createRoute({
|
||||
tags: ["server"],
|
||||
summary: "Returns current active lots that are tech released",
|
||||
method: "post",
|
||||
path: "/sendmail",
|
||||
middleware: authMiddleware,
|
||||
request: {
|
||||
body: {
|
||||
content: {
|
||||
"application/json": { schema: EmailSchema },
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: responses(),
|
||||
}),
|
||||
async (c) => {
|
||||
const { data: bodyData, error: bodyError } = await tryCatch(
|
||||
c.req.json()
|
||||
);
|
||||
if (bodyError) {
|
||||
return c.json(
|
||||
{
|
||||
success: false,
|
||||
message: "There was an error sending the email",
|
||||
data: bodyError,
|
||||
},
|
||||
400
|
||||
);
|
||||
}
|
||||
const { data: emailData, error: emailError } = await tryCatch(
|
||||
sendEmail(bodyData)
|
||||
);
|
||||
|
||||
if (emailError) {
|
||||
return c.json({
|
||||
success: false,
|
||||
message: "There was an error sending the email",
|
||||
data: emailError,
|
||||
});
|
||||
}
|
||||
|
||||
return c.json({
|
||||
success: emailData.success,
|
||||
message: emailData.message,
|
||||
data: emailData.data,
|
||||
});
|
||||
}
|
||||
);
|
||||
export default app;
|
||||
113
server/services/notifications/utils/masterNotifications.ts
Normal file
113
server/services/notifications/utils/masterNotifications.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { db } from "../../../../database/dbclient.js";
|
||||
import { notifications } from "../../../../database/schema/notifications.js";
|
||||
import { createLog } from "../../logger/logger.js";
|
||||
|
||||
/**
 * Master list of notification definitions seeded into the notifications
 * table (see notificationCreate below).
 *
 * Shared fields:
 * - name: unique key; the monitor dynamically imports
 *   ../controller/notifications/<name>.js by this name.
 * - checkInterval + timeType ("min" | "hour"): polling cadence.
 * - emails: recipient list; scheduling is skipped while empty.
 * - active: must be true (with non-empty emails) to be scheduled.
 * - notifiySettings: per-notification state blob (spelling matches the
 *   DB column — do not "fix" it here).
 */
export const note: any = [
  {
    name: "reprintLabels",
    description:
      "Monitors the labels that are printed and returns a value if one falls withing the time frame defined below.",
    checkInterval: 1,
    timeType: "min",
    emails: "",
    active: false,
    notifiySettings: { prodID: 1 },
  },
  {
    name: "downTimeCheck",
    description:
      "Checks for specific downtimes that are greater than 105 min.",
    checkInterval: 30,
    timeType: "min",
    emails: "",
    active: false,
    notifiySettings: { prodID: 1, daysInPast: 5, duration: 105 },
  },
  {
    name: "qualityBlocking",
    description:
      "Checks for new blocking orders that have been entered, recommened to get the most recent order in here before activating.",
    checkInterval: 30,
    timeType: "min",
    emails: "",
    active: false,
    notifiySettings: { prodID: 1, sentBlockingOrders: [1] },
  },
  {
    name: "productionCheck",
    description: "Checks ppoo",
    checkInterval: 2,
    timeType: "hour",
    emails: "",
    active: false,
    notifiySettings: {
      prodID: 1,
      count: 0,
      weekend: false,
      locations: "0",
    },
  },
  {
    name: "stagingCheck",
    description:
      "Checks staging based on locations, locations need to be seperated by a ,",
    checkInterval: 2,
    timeType: "hour",
    emails: "",
    active: false,
    notifiySettings: {
      prodID: 1,
      count: 0,
      weekend: false,
      locations: "0",
    },
  },
  {
    name: "tiIntergration",
    description: "Checks for new releases to be put into ti",
    checkInterval: 2,
    timeType: "hour",
    emails: "",
    active: false,
    notifiySettings: {
      prodID: 1,
      start: 36,
      end: 720,
      releases: [1, 2, 3],
    },
  },
  {
    name: "exampleNotification",
    description: "Checks for new releases to be put into ti",
    checkInterval: 2,
    timeType: "min",
    emails: "",
    active: true,
    notifiySettings: {
      prodID: 1,
      start: 36,
      end: 720,
      releases: [1, 2, 3],
    },
  },
];
|
||||
|
||||
export const notificationCreate = async () => {
|
||||
for (let i = 0; i < note.length; i++) {
|
||||
try {
|
||||
const notify = await db
|
||||
.insert(notifications)
|
||||
.values(note[i])
|
||||
.onConflictDoNothing();
|
||||
} catch (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`There was an error getting the notifications: ${JSON.stringify(
|
||||
error
|
||||
)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
158
server/services/notifications/utils/processNotifications.ts
Normal file
158
server/services/notifications/utils/processNotifications.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
import { db } from "../../../../database/dbclient.js";
|
||||
import { notifications } from "../../../../database/schema/notifications.js";
|
||||
import { tryCatch } from "../../../globalUtils/tryCatch.js";
|
||||
import { createLog } from "../../logger/logger.js";
|
||||
import { Cron } from "croner";
|
||||
|
||||
// Registry of live Cron jobs keyed by notification name. (Despite the
// original comment, the values are croner Cron handles, not timeouts.)
export let runningNotifications: Record<string, Cron> = {};
|
||||
|
||||
export const startNotificationMonitor = async () => {
|
||||
// if restarted or crashed we need to make sure the running notifications is cleared
|
||||
createLog("info", "notify", "notify", `Notification system is now active.`);
|
||||
|
||||
setInterval(async () => {
|
||||
const { data, error } = await tryCatch(db.select().from(notifications));
|
||||
|
||||
if (error) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
"notify",
|
||||
`There was an error getting the notifications: ${JSON.stringify(
|
||||
error
|
||||
)}`
|
||||
);
|
||||
}
|
||||
|
||||
const notes: any = data;
|
||||
|
||||
for (const note of notes) {
|
||||
//if we get deactivated remove it.
|
||||
if (runningNotifications[note.name] && !note.active) {
|
||||
createLog(
|
||||
"info",
|
||||
"notify",
|
||||
"notify",
|
||||
`${note.name} was just deactivated`
|
||||
);
|
||||
removeNotification(note.name);
|
||||
}
|
||||
|
||||
// if we are not active, no emails, and already in place just stop.
|
||||
|
||||
if (
|
||||
!note.active ||
|
||||
note.emails === "" ||
|
||||
runningNotifications[note.name]
|
||||
) {
|
||||
//console.log(`Skipping ${note.name} hes already scheduled`);
|
||||
continue;
|
||||
}
|
||||
|
||||
let time = `*/30 * * * *`; // default to be every 30 min
|
||||
|
||||
if (note.timeType === "min") {
|
||||
console.log(`Creating the min mark here`);
|
||||
time = `*/${note.checkInterval} * * * *`;
|
||||
}
|
||||
|
||||
if (note.timeType === "hour") {
|
||||
console.log(`Creating the hour mark here`);
|
||||
time = `* */${note.checkInterval} * * *`;
|
||||
}
|
||||
|
||||
createJob(note.name, time, async () => {
|
||||
try {
|
||||
const { default: runFun } = await import(
|
||||
`../controller/notifications/${note.name}.js`
|
||||
);
|
||||
await runFun(note);
|
||||
} catch (error: any) {
|
||||
createLog(
|
||||
"error",
|
||||
"notify",
|
||||
note.name,
|
||||
`Error running notification: ${error.message}`
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
//testParse(runningNotifcations[note.name]);
|
||||
}
|
||||
}, 5 * 1000);
|
||||
};
|
||||
|
||||
const createJob = (id: string, schedule: string, task: () => Promise<void>) => {
|
||||
// Destroy existing job if it exists
|
||||
if (runningNotifications[id]) {
|
||||
runningNotifications[id].stop(); // Croner uses .stop() instead of .destroy()
|
||||
}
|
||||
|
||||
// Create new job with Croner
|
||||
runningNotifications[id] = new Cron(
|
||||
schedule,
|
||||
{
|
||||
timezone: "America/Chicago",
|
||||
catch: true, // Prevents unhandled rejections
|
||||
},
|
||||
task
|
||||
);
|
||||
|
||||
// Optional: Add error handling (Croner emits 'error' events)
|
||||
// runningNotifications[id].on("error", (err) => {
|
||||
// console.error(`Job ${id} failed:`, err);
|
||||
// });
|
||||
};
|
||||
|
||||
interface JobInfo {
|
||||
id: string;
|
||||
schedule: string;
|
||||
nextRun: Date | null;
|
||||
isRunning: boolean;
|
||||
}
|
||||
|
||||
export const getAllJobs = (): JobInfo[] => {
|
||||
return Object.entries(runningNotifications).map(([id, job]) => ({
|
||||
id,
|
||||
schedule: job.getPattern() || "invalid",
|
||||
nextRun: job.nextRun() || null,
|
||||
lastRun: job.previousRun() || null,
|
||||
isRunning: job ? !job.isStopped() : false,
|
||||
}));
|
||||
};
|
||||
|
||||
const removeNotification = (id: any) => {
|
||||
if (runningNotifications[id]) {
|
||||
runningNotifications[id].stop();
|
||||
delete runningNotifications[id];
|
||||
}
|
||||
};
|
||||
|
||||
export const stopAllJobs = () => {
|
||||
Object.values(runningNotifications).forEach((job: any) => job.stop());
|
||||
runningNotifications = {}; // Clear the object
|
||||
};
|
||||
|
||||
/*
|
||||
// Pause a job
|
||||
app.post("/api/jobs/:id/pause", (req, res) => {
|
||||
runningNotifications[req.params.id]?.pause();
|
||||
res.json({ success: true });
|
||||
});
|
||||
|
||||
// Resume a job
|
||||
app.post("/api/jobs/:id/resume", (req, res) => {
|
||||
runningNotifications[req.params.id]?.resume();
|
||||
res.json({ success: true });
|
||||
});
|
||||
|
||||
// Delete a job
|
||||
app.delete("/api/jobs/:id", (req, res) => {
|
||||
runningNotifications[req.params.id]?.stop();
|
||||
delete runningNotifications[req.params.id];
|
||||
res.json({ success: true });
|
||||
});
|
||||
|
||||
|
||||
*/
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user