Compare commits

...

81 Commits

Author SHA1 Message Date
dcfa56bdb9 fix(notify): fixed plant to plant notification that would cause multiple emails to be sent and never update 2026-03-11 15:30:53 -05:00
ea92422bb1 feat(notification): plant to plant EDI 2026-03-10 08:18:29 -05:00
2111a5fdc9 refactor(sql): changes to improve the job disable 2026-02-18 08:53:19 -06:00
6edd20585f refactor(stats): added in mastermacro version 2026-02-18 08:52:29 -06:00
a9759795c4 fix(scripts): changed the fake scanning to be more readable 2026-02-16 19:15:32 -06:00
32f26a1725 fix(quality): hoping to finally find the bug that kills me every night 2026-02-16 19:05:41 -06:00
60533beed5 feat(sql): job disabling scripts 2026-02-16 19:05:08 -06:00
24ced97b6d feat(notification): added cycle count check 2026-02-16 19:04:38 -06:00
dc1d342799 fix(scanner): if host or port is not sent over, stop the connection right away 2026-02-16 19:04:04 -06:00
44d0cb63cf refactor(sql): moved new queries to their own folder to make it easier to work with and migrate 2026-02-16 19:01:23 -06:00
ace73fa919 refactor(sendmail): updated the smtp per alpla needs 2026-02-16 18:59:12 -06:00
316af4233f refactor(stats): added sheet version check in 2026-02-16 18:58:17 -06:00
36a805c652 refactor(scripts): create finance bol 2026-02-16 09:40:19 -06:00
460bc3d24a feat(query selector): query selector from file based vs cp to ts files 2026-02-16 09:40:00 -06:00
ec201fcfb5 refactor(sql): full changes to localhost if on production server 2026-02-16 09:39:35 -06:00
914ad46c43 refactor(sql server): changes to look at localhost if in production 2026-02-16 09:38:55 -06:00
b96c546ed3 refactor(notify): changed to only allow max 100 errors in the email 2026-02-16 09:38:31 -06:00
29b3be41a1 build(notification): fixed fifo index ts errors 2026-02-16 09:38:10 -06:00
16edf58025 refactor(eom): changes to hist inv 2026-02-16 09:37:40 -06:00
775627f215 feat(scanner): tcp scanner connection based on env var no more db stuff 2026-02-16 09:37:14 -06:00
4e70fae69b refactor(api docs): added/changed docs 2026-02-16 09:36:44 -06:00
24dd109a21 fix(commandlog): changes to the log table 2026-02-13 16:08:23 -06:00
38b57a00cc refactor(datamart): article changes to add pet-g 2026-02-13 16:03:26 -06:00
f8070db95f fix(sqlserver): changed to proper pool connection 2026-02-13 16:02:43 -06:00
10e9dc430c fix(notification): limited to 1000 max errors 2026-02-13 15:59:38 -06:00
6b669ccd9c fix(labelinfo): corrected the query on label info for external 2026-02-13 14:51:15 -06:00
d9a10d98a1 refactor(sendmail): change the send mail function from noreply to donotreply 2026-02-13 14:50:44 -06:00
e64dc7c013 refactor(ocp): removed zechetti 2 from this silly thing for now 2026-02-13 14:50:07 -06:00
d63138d746 helper scripts 2026-02-03 15:40:51 -06:00
84a28f2d01 added relocate 2026-02-03 15:40:41 -06:00
9be6614972 fix(ocp): more material check work 2026-01-19 07:50:27 -06:00
9d0db71f6a fix(datamart): psiPlanning was looking at theoretical, but in a rare case this would look at last year's data and be missed and not pull correct data; switched to plan end
2026-01-15 13:28:18 -06:00
3cc55436f3 refactor(psi): old planning numbers to revert back to maybe 2026-01-08 20:11:40 -06:00
124fde07e0 refactor(psi): planning numbers refactored to deal with a bad downsync that caused negative numbers 2026-01-08 20:08:31 -06:00
b15d0d7322 refactor(datamart): delivery by date range updates 2026-01-08 20:08:03 -06:00
0680f332fb refactor(manual print): added new option for multiple tags on pallet 2026-01-08 20:07:28 -06:00
46bf310dce refactor(datamart): changed the getDelbyDateRange to the new 2.0 way 2026-01-05 15:17:15 -06:00
0dda6ae744 fix(notifications): fixed a typo in the alert being sent out only showing 1 alert instead of many 2026-01-05 10:27:09 -06:00
1b59cdd3a4 fix(psi): corrections to account for the time offset in the psi 2026-01-02 11:07:45 -06:00
56934216f7 fix(sql): fix connection issues where the sql connection was being lost 2025-12-31 07:54:26 -06:00
e8a2ef8b85 refactor(ocp): plc reading changes to disconnect and reconnect
it was found that there were some errors that spammed the log and caused the server to stop responding and crash weirdly, so added a disconnect and reconnect back so we can figure out what's going on.
2025-12-30 10:55:28 -06:00
6cbffa4ac5 feat(notification): error monitoring
if there are more than 10 errors in a 15 min window, sends an email to alert someone (a rough sketch of this check appears after the commit list)
2025-12-30 10:54:09 -06:00
09f16f4e62 refactor(datamart): added in 2 new queries 2025-12-30 08:43:03 -06:00
461acb2b16 fix(datamart): removed limitation on inhousedelivery 2025-12-30 07:21:40 -06:00
0d05c66a2b fix(quality): changes to the logging to reduce unnecessary logging 2025-12-30 07:13:20 -06:00
096cc18477 refactor(datamart): psi work 2025-12-30 07:12:38 -06:00
f3333ce020 fix(dm): abbott truck time corrections 2025-12-18 08:47:12 -06:00
8e3d2b3d95 fix(dm): new scj custom mapping added for westbend 2025-12-12 10:53:33 -06:00
501709546d feat(dm): abbott trucklist will do orders and forecast now 2025-12-11 15:56:34 -06:00
2b5e77993b test(dm): starts on abbott forecast to be brought over from the original truck list 2025-12-11 14:43:50 -06:00
6efaffbb17 fix(printers): add null case for the printerID 2025-12-11 14:43:22 -06:00
90ddbca2e7 fix(rfid): changes to log all reads and also replace some trailing text from a hex 2025-12-11 14:42:51 -06:00
7a9ea16f48 fix(dm): energizer forecast to correct the date and qty push over 2025-12-11 14:42:22 -06:00
420826de9b fix(frontend): transfer lots style adjustment 2025-12-11 14:41:48 -06:00
dc2d3718fa fix(dm): energizer orders missing remark 2025-12-11 14:41:22 -06:00
5013228384 feat(inhouse): delivery by pallet and lot added 2025-12-08 13:16:23 -06:00
4459742cf0 feat(datamart): inhouse delivery added 2025-12-08 13:15:59 -06:00
070c3ee975 fix(mainmaterial): added missing return 2025-12-08 13:15:37 -06:00
8ac92888ad test(mobile): keyence added 2025-12-08 08:37:25 -06:00
567579ef35 fix(transfers): corrected to be an int vs float 2025-12-08 08:37:07 -06:00
8d90f27514 fix(rfid): corrected the link to reset 2025-12-08 08:36:47 -06:00
722b23a321 fix(rfid): changed the tag reading to be a little more flexible 2025-12-08 08:36:25 -06:00
ba48c5307f style(transfer to next lot): spelling issue 2025-12-08 08:35:22 -06:00
30d2ec0477 fix(materials): if no machine, stop the check and fail so it manually needs to be tried again 2025-12-08 08:34:52 -06:00
d3c6444491 chore(release): 1.9.0 2025-12-02 18:30:54 -06:00
12345c0b64 fix(commands): corrections to allow external labels to be consumed and transferred 2025-12-02 18:29:30 -06:00
6833dfc992 refactor(quality): added a check to monitor #7 as well 2025-12-02 15:25:05 -06:00
ac27a286c0 fix(lot transfer): changes to make it so the reprint and return do not happen instantly 2025-12-02 15:24:10 -06:00
a3dba6cc9d refactor(helpercommands): removed the remove as reusable 2025-12-02 15:22:02 -06:00
320dd47aea refactor(sql): some changes to help with sql connection on random disconnect 2025-12-02 15:21:34 -06:00
712a6eebdf fix(lstv2): added in a close function to stop crashing the server 2025-11-30 10:31:27 -06:00
f226c5644c fix(dm): typo in customer article number 2025-11-30 10:30:54 -06:00
d605225e48 refactor(swagger): corrected the name displayed 2025-11-26 08:53:05 -06:00
8e7f1eb098 feat(datamart): active article moved over to the new version 2025-11-26 08:52:28 -06:00
59c6fd0117 fix(labeling): added in a catch to avoid rogue lots 2025-11-26 08:29:33 -06:00
2607fd3026 feat(swagger): added in the start of swagger where all the common and usable endpoints will be 2025-11-25 17:02:20 -06:00
bdb4bfc53d chore(module updates): just updated all the modules 2025-11-25 16:12:12 -06:00
c1816c07ff refactor(controller): only install npm production modules, don't install everything 2025-11-25 16:11:41 -06:00
7311372ba8 feat(dm): added article description into the historical data 2025-11-25 14:58:47 -06:00
cd53460bec chore(release): 1.8.0 2025-11-25 14:36:45 -06:00
7e15e5d7bc feat(settings): final migration of settings and edits added 2025-11-25 14:36:06 -06:00
148 changed files with 13659 additions and 5456 deletions
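For context on commit 6cbffa4ac5 (error monitoring) above, here is a rough TypeScript sketch of the threshold it describes: more than 10 error-level entries inside a 15 minute window triggers an alert email. The function names and parameters are placeholders for illustration, not the implementation in this change set.

```ts
// Sketch only: count error-level log rows from the last 15 minutes and alert
// once the count passes 10. The two callbacks are hypothetical stand-ins for
// whatever log store and mailer the service actually uses.
const WINDOW_MS = 15 * 60 * 1000;
const MAX_ERRORS = 10;

async function checkErrorRate(
  countErrorsSince: (since: Date) => Promise<number>,
  sendAlert: (msg: string) => Promise<void>,
) {
  const since = new Date(Date.now() - WINDOW_MS);
  const errors = await countErrorsSince(since);
  if (errors > MAX_ERRORS) {
    await sendAlert(`${errors} errors logged in the last 15 minutes`);
  }
}
```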

View File

@@ -49,12 +49,15 @@
"go.formatTool": "goimports",
"cSpell.words": [
"acitve",
"actaully",
"alpla",
"alplamart",
"alplaprod",
"autoconsume",
"intiallally",
"ppoo",
"prodlabels"
"prodlabels",
"rfid"
],
"gitea.token": "8456def90e1c651a761a8711763d6ef225d6b2db",
"gitea.instanceURL": "https://git.tuffraid.net",

View File

@@ -1,5 +1,138 @@
# All Changes to LST can be found below.
## [1.9.0](https://git.tuffraid.net/cowch/lst/compare/v1.8.0...v1.9.0) (2025-12-03)
### 📝 Chore
* **module updates:** just updated all the modules ([bdb4bfc](https://git.tuffraid.net/cowch/lst/commits/bdb4bfc53d24f37f0e7098ea828cf418d58d5224))
### 🌟 Enhancements
* **datamart:** active article moved over to the new version ([8e7f1eb](https://git.tuffraid.net/cowch/lst/commits/8e7f1eb09811fcf3ea49b95b0ba9a8f55b9c4184))
* **dm:** added article description into the historical data ([7311372](https://git.tuffraid.net/cowch/lst/commits/7311372ba8eb901b51972ca216152bcfc2b009af))
* **swagger:** added in the start of swagger where all the common and usable endpoints will be ([2607fd3](https://git.tuffraid.net/cowch/lst/commits/2607fd3026ed0b5777a5598aa3498ffc67baa012))
### 🛠️ Code Refactor
* **controller:** only install npm production modules, don't install everything ([c1816c0](https://git.tuffraid.net/cowch/lst/commits/c1816c07ff5ac939b0997d314a9da624a4a66b7a))
* **helpercommands:** removed the remove as reusable ([a3dba6c](https://git.tuffraid.net/cowch/lst/commits/a3dba6cc9db147ff4765fef648867e50878a6ac8))
* **quality:** added a check to monitor [#7](https://git.tuffraid.net/cowch/lst/issues/7) as well ([6833dfc](https://git.tuffraid.net/cowch/lst/commits/6833dfc9929741203083b01726b83a6c8d61d308))
* **sql:** some changes to help with sql connection on random disconnect ([320dd47](https://git.tuffraid.net/cowch/lst/commits/320dd47aea017b4ff219b07e363ef87ec8523b82))
* **swagger:** corrected the name displayed ([d605225](https://git.tuffraid.net/cowch/lst/commits/d605225e48bca66f915ce0db448aa61933891986))
### 🐛 Bug fixes
* **commands:** corrections to allow external labels to be consumed and transferred ([12345c0](https://git.tuffraid.net/cowch/lst/commits/12345c0b6442c3abd309f660bb43216def9abb89))
* **dm:** typo in customer article number ([f226c56](https://git.tuffraid.net/cowch/lst/commits/f226c5644cc2b93b9d967962bd6f82b3e506c8c0))
* **labeling:** added in a catch to avoid rogue lots ([59c6fd0](https://git.tuffraid.net/cowch/lst/commits/59c6fd011728dff50bfa3233d6095c396d0b1999))
* **lot transfer:** changes to make it so the reprint and return do not happen instantly ([ac27a28](https://git.tuffraid.net/cowch/lst/commits/ac27a286c07733333703d8421cfa525691363e54))
* **lstv2:** added in a close function to stop crashing the server ([712a6ee](https://git.tuffraid.net/cowch/lst/commits/712a6eebdfef0ce2b99155d23422ddc7e5e0daad))
## [1.8.0](https://git.tuffraid.net/cowch/lst/compare/v1.7.0...v1.8.0) (2025-11-25)
### 📝 Chore
* **misc:** api doc dates and db changes ([52089ec](https://git.tuffraid.net/cowch/lst/commits/52089ecdf06d9e5983afb6a24953d066a0456949))
### 📈 Project changes
* **settings:** changes to vs code as it was being way too slow ([5277ddf](https://git.tuffraid.net/cowch/lst/commits/5277ddfc51632651059e35b799ddf2a63d70cb70))
### 📝 Testing Code
* **android app:** this is the start to the android app ([7b630d5](https://git.tuffraid.net/cowch/lst/commits/7b630d5c0b1175f06c866b1238754e455d3a27c9))
* **controller:** added in server port that might not be needed but oh well ([efdab5b](https://git.tuffraid.net/cowch/lst/commits/efdab5bafca2a46d73de7ab1d776d283d5294aa3))
* **materials per day:** work on getting this running better ([a30eebf](https://git.tuffraid.net/cowch/lst/commits/a30eebf5d34c77c6c7118faf01776651f8888547))
* **mobile:** more testing on ota ([75c0659](https://git.tuffraid.net/cowch/lst/commits/75c0659658cbbb7e983df5538538d6429a325379))
* **mobile:** stallion integration starts ([20fc286](https://git.tuffraid.net/cowch/lst/commits/20fc2860690f59851ca1e0251df84854c9d2ba01))
* **mobile:** testing for ota updates on android scanner ([314ab04](https://git.tuffraid.net/cowch/lst/commits/314ab049bb650120489259e920e52fd530f0ce41))
* **ti integration:** added the placeholder for integration of comments for customer ([f8cf085](https://git.tuffraid.net/cowch/lst/commits/f8cf0851a86a6923a8941774efe17f93cb92e984))
### 🛠️ Code Refactor
* **all server stats:** added a 5 second timeout; if it does not reach it, it just stops ([2133b94](https://git.tuffraid.net/cowch/lst/commits/2133b94a1dd84e16bb5a4b8fe215739a4b355223))
* **app:** refactored how we have the pkg.json so we can reduce on size of the app ([90920e8](https://git.tuffraid.net/cowch/lst/commits/90920e8fba4757297e0e42e86f80d5f14434a48e))
* **base modules:** removed the log spam ([6d27a7a](https://git.tuffraid.net/cowch/lst/commits/6d27a7aa6395a094d8763ba3fde0bdb81a7e3082))
* **cards:** trying to make the cards fit the odd shaped screens better ([33cbb17](https://git.tuffraid.net/cowch/lst/commits/33cbb17a0ede136a9be96e47ba0a7a66468b1ebc))
* **forklifts:** more refactoring to improve during production ([8c0f67c](https://git.tuffraid.net/cowch/lst/commits/8c0f67ca351778405279f7e225ee8dae654033f9))
* **inv with rn:** now includes batch number for tetra ([1084ced](https://git.tuffraid.net/cowch/lst/commits/1084cede04d43ec2b2c22c43c6e701bad4701981))
* **leases:** removed main server until i have a better way to sync them ([6ce4d84](https://git.tuffraid.net/cowch/lst/commits/6ce4d84fd00fa446ccb7d1bbad28680f045fae52))
* **ocp page:** using the name of the url now vs the settings ([d406a92](https://git.tuffraid.net/cowch/lst/commits/d406a92f3d5d6a8902164e9182717912debae804))
* **ocp:** workaround for zechetti 2 until we can monitor more closely ([700346d](https://git.tuffraid.net/cowch/lst/commits/700346d80972e464d0a9ba62bba4dc0ed949cdee))
* **quality:** added some new options plus cancel button ([242ff62](https://git.tuffraid.net/cowch/lst/commits/242ff6277a1f407fbed2951d30dd6cf1ee32dd60))
* **quality:** more changes to the system to perfect it ([7b28f4e](https://git.tuffraid.net/cowch/lst/commits/7b28f4e9ef32a4fc90a0b4b16953b6cead096cac))
* **serverlist:** refactored to also show uptime and other info about the server ([e1e659f](https://git.tuffraid.net/cowch/lst/commits/e1e659f9b14f22474f919350f07b02b45141aa63))
* **types:** moved the item type to the sidebar to keep it more clean ([5023d4d](https://git.tuffraid.net/cowch/lst/commits/5023d4d129737cf6e0609592e5606a20a0f3728b))
* **wrapper:** removed the logs so its not spamming the server ([b8a9aa5](https://git.tuffraid.net/cowch/lst/commits/b8a9aa5132c7606fcccae8f058a77a11a8ed552a))
### 🐛 Bug fixes
* **comments:** added new role to put comments in ([1283a63](https://git.tuffraid.net/cowch/lst/commits/1283a63b5fd71fb44f7ec7789f670f8af7eafbb8))
* **correction:** the name needed to be correct to have a proper template ([b1c56ee](https://git.tuffraid.net/cowch/lst/commits/b1c56ee4bb32c0dbf86e0164614fb3f1ecaf262d))
* **dm:** correction to passing the username over for the importing of the file ([a7a9aa2](https://git.tuffraid.net/cowch/lst/commits/a7a9aa2874ddd1391b56983db51cfabd8e789213))
* **dm:** fixes to validate auth before submitting in case someone stays on the page too long ([fef0303](https://git.tuffraid.net/cowch/lst/commits/fef0303cd6fdc9cc8cf9f9f4ad674a8b725691f3))
* **file name wrong:** fixed the name of the file for getCompanies ([8996da7](https://git.tuffraid.net/cowch/lst/commits/8996da7eb46a8b5bdfe44ee74a676e701d64fdbf))
* **forecast table:** correction to the customer article number ([ebe5c0b](https://git.tuffraid.net/cowch/lst/commits/ebe5c0bd5a883b1cbe87f7f9932fd803e80e7fae))
* **historical date:** added so we can have all dates ([9d793d2](https://git.tuffraid.net/cowch/lst/commits/9d793d22051c585ed224bfaf16e2a9b60bb02635))
* **historical inv:** corrected the way the date can come over to allow for yyyy-mm-dd or with / ([7c40f02](https://git.tuffraid.net/cowch/lst/commits/7c40f028c88d7fd78ac8ab75c172d808783fc641)), closes [#1](https://git.tuffraid.net/cowch/lst/issues/1)
* **historicalinv:** removed the second running one that caused duplicates ([a6cc17c](https://git.tuffraid.net/cowch/lst/commits/a6cc17ccb12b0d99ffdb1d371c5daf3bbb91f7ba))
* **label ratio:** correction to the endpoint ([50b7c9c](https://git.tuffraid.net/cowch/lst/commits/50b7c9cac5cd6923b08a8705fc8cb41530ec5b02))
* **manual print:** fixed so the print disables ([d3e8e94](https://git.tuffraid.net/cowch/lst/commits/d3e8e941103dc0118066e8790e7c27e5f035a6c5))
* **nav:** added missing add card button ([8fca201](https://git.tuffraid.net/cowch/lst/commits/8fca201e0463aba7ecace61f8dfb737e2acf4140))
* **ocp:** made corrections to the ocp page in dayton ([92af726](https://git.tuffraid.net/cowch/lst/commits/92af7262f60514501b903f5307d34e9154cc9034))
* **plc zeccetti:** changes to improve the timing on the zecetti; more to be done ([247010d](https://git.tuffraid.net/cowch/lst/commits/247010d48f10ebb02a1b98c5df101134e8dab250))
* **preprint:** added the correct to string for the preprint stuff ([360c016](https://git.tuffraid.net/cowch/lst/commits/360c0163f1d3135d9c1c3788ac53dc8e0757c441))
* **prodendpoint:** if we have a real error just report it, don't actually crash ([3193e07](https://git.tuffraid.net/cowch/lst/commits/3193e07e4707d055517b15f77ac117fefe07de12))
* **quality request:** bug fixes ([6f632ec](https://git.tuffraid.net/cowch/lst/commits/6f632ecd6831456c6e3c9973bc0ce7feb229aeec))
* **quality:** corrected url and perms for quality link ([eb6b9ce](https://git.tuffraid.net/cowch/lst/commits/eb6b9ce388c5dea35f95a9403765e7d330b664f9))
* **quality:** request was missing the forced selection ([96c3e4c](https://git.tuffraid.net/cowch/lst/commits/96c3e4c24adbdc59d11f7ea43888e1c47d061f90))
* **register:** added the ability to put in _ for user name ([aba1668](https://git.tuffraid.net/cowch/lst/commits/aba1668d2cab63a031657fb7c9f2bfb9777fa72a))
* **servers:** changed the server name to be unique ([a7bde5e](https://git.tuffraid.net/cowch/lst/commits/a7bde5e4eb41c597f94302dd2d119f7048c18a6f))
* **silo adjustments:** added supervisor to see the actual page so it matches the sidebar ([9aa0b31](https://git.tuffraid.net/cowch/lst/commits/9aa0b31278e5f8201acd21774f19ba69709a654d))
* **silo commits:** added in email that was now missing due to new auth ([25a958d](https://git.tuffraid.net/cowch/lst/commits/25a958d592d189f896ae0b5f7608d80a6ee2b1e7))
* **uom:** correction to how we do the uom check so we don't just look for the space ([ca866bf](https://git.tuffraid.net/cowch/lst/commits/ca866bf8c63e0576e890367d24a47c7ab46cc864))
### 🌟 Enhancements
* **added in swagger:** added the base for swagger to implement fully later ([9d9ca63](https://git.tuffraid.net/cowch/lst/commits/9d9ca63d7c9ab3e3ea168cf2add9c7baf2b9ed15))
* **articles:** moved articles over to the main server ([2a6eafa](https://git.tuffraid.net/cowch/lst/commits/2a6eafa19a97f0be01f63c68b63b4abfc4de1409))
* **barcode gen:** added the missing link ([b4064e8](https://git.tuffraid.net/cowch/lst/commits/b4064e87691937ad9f99441767b556a167b91055))
* **db manual fixes:** added a way to fix manual db changes as needed ([0b02984](https://git.tuffraid.net/cowch/lst/commits/0b0298423ed75eed6d112a04dda998b8a23b20ea))
* **dm:** new endpoint to get the forecast data ([a96b85b](https://git.tuffraid.net/cowch/lst/commits/a96b85bc536809d223dd7a29150d1a4d632e80da))
* **forecast data:** added in a historical forecast data set ([c2ae445](https://git.tuffraid.net/cowch/lst/commits/c2ae445ea4d26b047a2ee5d16041ed230f7b2061))
* **forklifts:** added backend forklift stuff and frontend companies ([50cde2d](https://git.tuffraid.net/cowch/lst/commits/50cde2d8d2aa24796db1f1c0126ef8c373614d5d))
* **forklifts:** added the ability to add new forklifts in ([7b6c9bd](https://git.tuffraid.net/cowch/lst/commits/7b6c9bdfbf2cf9d97c8e23d8ebd6523e32284963))
* **forklifts:** added the crud ([577584e](https://git.tuffraid.net/cowch/lst/commits/577584ef4dd10ee7f57ab0ad0d6261adddaf8966))
* **form stuff:** added in a searchable dropdown and added to new forklifts ([b23bb0d](https://git.tuffraid.net/cowch/lst/commits/b23bb0db31f78f46ffc556577cadb62e0bfa3b83))
* **invoice form:** added new invoice form ([65304f6](https://git.tuffraid.net/cowch/lst/commits/65304f61ceb3ad4655757aa5c291ac4ed77db048))
* **invoices:** added invoice + linking to forklift ([2e05f6e](https://git.tuffraid.net/cowch/lst/commits/2e05f6eeee052a92095098c73ace0bd331c43b22))
* **leases:** added in leases and moved table to reusable component ([bd7bea8](https://git.tuffraid.net/cowch/lst/commits/bd7bea8db697f5b025b8d93f86677a9a69cdf2b4))
* **listeners:** added in a new feature to auto add new listeners ([f9cfada](https://git.tuffraid.net/cowch/lst/commits/f9cfada8409b3a88323dafa80730c5565c067da8))
* **materials per day:** more work on materials per day ([564f0b5](https://git.tuffraid.net/cowch/lst/commits/564f0b5addd109018a806edd6a1fed4399ea63aa))
* **migration:** settings migration from old app all is now in the new app ([40bc19a](https://git.tuffraid.net/cowch/lst/commits/40bc19aa6f952a7a60b5ee8281fa159ca114161f))
* **missing inv:** adding a way to check for missing data in case it does not pull on the correct days ([d17edb1](https://git.tuffraid.net/cowch/lst/commits/d17edb1f9c830a2c17d28bd9180d264607d66fa2))
* **mobile:** ota updates added ([b6030de](https://git.tuffraid.net/cowch/lst/commits/b6030de4f44e73ce8bb9152886d384b9d7f2edff))
* **notify:** material per day for the next 90 days ([c509c7f](https://git.tuffraid.net/cowch/lst/commits/c509c7fe286a43ab0ffbf86635631477237632b5))
* **quality:** added in comments ([566754b](https://git.tuffraid.net/cowch/lst/commits/566754bf2ecfc390bc927b48aadb2fa934353769))
* **quality:** added location moved to to the table ([9572b71](https://git.tuffraid.net/cowch/lst/commits/9572b7159235c18617ff46058c94dfd9cfab8abc))
* **quality:** priority ranking added ([c777395](https://git.tuffraid.net/cowch/lst/commits/c777395b0350f60bd457c3164ed1ae478249df3a))
* **scroll view:** added in a scroll view for quality so it does not go over the end of the page ([b0ac326](https://git.tuffraid.net/cowch/lst/commits/b0ac326752331ab01ad981fa7b1022e82beab143))
* **servers:** added a link to the server by clicking on the name and the gp code ([00ef72d](https://git.tuffraid.net/cowch/lst/commits/00ef72de90e43c12bd3fecdc08dfa1e3a4f881fb))
* **settings:** added in dyco printing settings ([2ed6bf4](https://git.tuffraid.net/cowch/lst/commits/2ed6bf4d1f32f9a92712ccb36d4a4146ca112e85))
* **settings:** final migration of settings and edits added ([7e15e5d](https://git.tuffraid.net/cowch/lst/commits/7e15e5d7bcdf58f31bd96564be1f213d01d37cda))
* **start of server:** added the start of server data ([d60c08a](https://git.tuffraid.net/cowch/lst/commits/d60c08a281cd63f2183381a1a19c5e196b41fbc5))
* **templates:** added bug report template ([79f4121](https://git.tuffraid.net/cowch/lst/commits/79f4121311df733f5dc59b32a6b32c1b4a32f97b))
## [1.7.0](https://git.tuffraid.net/cowch/lst/compare/v1.6.0...v1.7.0) (2025-10-30)

View File

@@ -0,0 +1,16 @@
meta {
name: Error logging
type: http
seq: 4
}
get {
url: {{urlv2}}/api/notify/toomanyerrors
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}
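The request above is a plain HTTP GET. A minimal sketch of the same call from TypeScript (Node 18+ global fetch), assuming {{urlv2}} resolves to the urlv2 value shown in the collection environment further down and that the endpoint responds with JSON:

```ts
// Sketch only: the base URL stands in for the {{urlv2}} Bruno variable.
const baseUrl = process.env.URL_V2 ?? "http://usbow1vms006:3000";

const res = await fetch(`${baseUrl}/api/notify/toomanyerrors`);
console.log(res.status, await res.json()); // assumes a JSON body
```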

View File

@@ -0,0 +1,22 @@
meta {
name: sscc
type: http
seq: 4
}
post {
url: {{url}}/lst/old/api/logistics/getsscc
body: json
auth: inherit
}
body:json {
{
"runningNr": ""
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,20 @@
meta {
name: PSI - Forecast data
type: http
seq: 1
}
get {
url: {{url}}/lst/old/api/datamart/psiforecastdata?customer=8
body: none
auth: inherit
}
params:query {
customer: 8
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,22 @@
meta {
name: PSI -planning data
type: http
seq: 2
}
get {
url: {{url}}/lst/old/api/datamart/psiplanningdata?avs=118,120&startDate=12/1/2025&endDate=12/31/2026
body: none
auth: inherit
}
params:query {
avs: 118,120
startDate: 12/1/2025
endDate: 12/31/2026
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: datamart
seq: 8
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,26 @@
meta {
name: Logs
type: http
seq: 2
}
get {
url: {{url}}/lst/old/api/logger/logs?service=ocp&service=rfid&service=dyco&level=error&level=info&level=warn&hours=12
body: none
auth: inherit
}
params:query {
service: ocp
service: rfid
service: dyco
level: error
level: info
level: warn
hours: 12
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,23 @@
meta {
name: Consume
type: http
seq: 1
}
post {
url: {{url}}/lst/old/api/logistics/consume
body: json
auth: inherit
}
body:json {
{
"lotNum":283559,
"runningNr":19302907
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: Consume
seq: 5
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,16 @@
meta {
name: SSCC
type: http
seq: 2
}
get {
url: {{url}}/lst/api/logistics/getsscc
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,20 @@
meta {
name: Update Setting
type: http
seq: 4
}
post {
url: {{url}}/lst/api/system/settings/:token
body: none
auth: inherit
}
params:path {
token: test3
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -1,6 +1,6 @@
{
"version": "1",
"name": "LogisticsSupportTool_API_DOCS",
"name": "lstv2",
"type": "collection",
"ignore": [
"node_modules",

View File

@@ -1,5 +1,5 @@
vars {
url: https://usflo1prod.alpla.net
url: http://localhost:5500
session_cookie:
urlv2: http://usbow1vms006:3000
jwtV2:

View File

@@ -0,0 +1,24 @@
meta {
name: bookout
type: http
seq: 2
}
post {
url: {{url}}/lst/old/api/logistics/bookout
body: json
auth: none
}
body:json {
{
"runningNr": "1865027",
"reason": "packer printed premature"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: logistics
seq: 7
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,24 @@
meta {
name: relocate
type: http
seq: 1
}
post {
url: {{url}}/lst/old/api/logistics/relocate
body: json
auth: inherit
}
body:json {
{
"runningNr": "56121541",
"laneID": "30006"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,24 @@
meta {
name: removeAsWaste
type: http
seq: 3
}
post {
url: {{url}}/lst/old/api/logistics/removeasreusable
body: json
auth: none
}
body:json {
{
"runningNr": "1865018",
"reason": "validating stockout"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: v3endpoints
seq: 5
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,16 @@
meta {
name: tester
type: http
seq: 1
}
post {
url: http://localhost:3000/lst/api/system/prodsql/start
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -20,9 +20,7 @@ import { baseSettings } from "./src/internal/system/controller/settings/baseSett
import {
addListeners,
manualFixes,
settingsMigrate,
} from "./src/internal/system/utlis/addListeners.js";
import { swaggerOptions } from "./src/pkg/apiDocs/swaggerOptions.js";
import { auth } from "./src/pkg/auth/auth.js";
import { db } from "./src/pkg/db/db.js";
import { settings } from "./src/pkg/db/schema/settings.js";
@@ -35,6 +33,9 @@ import { sendNotify } from "./src/pkg/utils/notify.js";
import { returnFunc } from "./src/pkg/utils/return.js";
import { tryCatch } from "./src/pkg/utils/tryCatch.js";
import { setupIoServer } from "./src/ws/server.js";
import { swaggerConfig, swaggerUiOptions } from "./src/internal/swagger/config.js";
import { setupSwagger } from "./src/internal/swagger/swagger.js";
const main = async () => {
const env = validateEnv(process.env);
@@ -77,7 +78,7 @@ const main = async () => {
// connect to the prod sql
console.log("Connecting to the sql server");
await initializeProdPool();
// express app
const app = express();
@@ -177,13 +178,14 @@ const main = async () => {
);
// docs and routes
const openapiSpec: any = swaggerJsdoc(swaggerOptions);
app.use(
basePath + "/api/docs",
swaggerUi.serve,
swaggerUi.setup(openapiSpec),
);
// const openapiSpec: any = swaggerJsdoc(swaggerConfig);
// app.use(
// basePath + "/api/docs",
// swaggerUi.serve,
// swaggerUi.setup(openapiSpec, swaggerUiOptions),
// );
initializeProdPool();
setupSwagger(app, basePath)
app.use(basePath + "/d", express.static(join(__dirname, "../lstDocs/build")));
app.use(
basePath + "/app",
@@ -219,9 +221,9 @@ const main = async () => {
addListeners();
//userMigrate();
// some temp fixes
// above 230 remove these
// above 235 remove these
manualFixes();
settingsMigrate();
//settingsMigrate();
}, 5 * 1000);
// setTimeout(() => {

View File

@@ -24,6 +24,13 @@ router.post("/", async (req: Request, res: Response) => {
.from(user)
.where(eq(user.username, validated.username));
if(userLogin.length === 0 ){
return res.status(200).json({
success: false,
message: `It appears you do not have a user yet. Please head over to the register page, create a user, then try again.`,
});
}
if (
!userLogin[0].lastLogin ||
differenceInDays(userLogin[0].lastLogin, new Date(Date.now())) > 120

View File

@@ -0,0 +1,89 @@
import { Router, type Request, type Response } from "express";
import { prodQuery } from "../../../pkg/prodSql/prodQuery.js";
import { tryCatch } from "../../../pkg/utils/tryCatch.js";
import { db } from "../../../pkg/db/db.js";
import { settings } from "../../../pkg/db/schema/settings.js";
import { eq } from "drizzle-orm";
import { activeArticle } from "../../../pkg/prodSql/querys/datamart/article.js";
type Articles = {
  article: string;
  description: string;
  articleType: string;
  pricePoint: string;
  salesPrice: string;
  typeOfMaterial: string;
  articleIdType: string;
  articleWeight: string;
  idAddress: string;
  addressDescription: string;
  addressType: string;
  profitCenter: string;
  fg: string;
  num_of_cycles: string;
  costsCenterId: string;
  costCenterDescription: string;
  customerArticleNumber: string;
  customerArticleDescription: string;
  cycleTime: string;
  salesAgreement: string;
  productFamily: string;
  uom: string;
};
const router = Router();
// GET <basePath>/api/datamart/activeArticle
router.get("/", async (req: Request, res: Response) => {
  const includePlantToken = req.query.includePlantToken;
  let articles: Articles[] = [];
  try {
    const articleRes = await prodQuery(activeArticle, "Get active articles");
    articles = articleRes?.data;
  } catch (error) {
    return res.status(500).json({
      success: false,
      message: "Error getting articles",
      error: error,
    });
  }
  if (includePlantToken) {
    const { data, error } = await tryCatch(
      db.select().from(settings).where(eq(settings.name, "plantToken")),
    );
    if (error) {
      return res.status(500).json({
        success: false,
        message: "Error getting settings",
        error: error,
      });
    }
    return res.status(200).json({
      success: true,
      message: "Active articles including plant token",
      data: articles.map((n) => {
        return { plantToken: data[0].value, ...n };
      }),
    });
  }
  return res.status(200).json({
    success: true,
    message: "Active articles",
    data: articles,
  });
});
export default router;

View File

@@ -0,0 +1,10 @@
import type { Express, Request, Response } from "express";
//datamart Routes
import getActiveAv from './getActiveAv.js'
export const setupDataMartRoutes = (app: Express, basePath: string) => {
const route = basePath + "/api/datamart"
app.use(route + '/activeArticle', getActiveAv);
//app.use(basePath + "/api/user/me", requireAuth(), me);
};

View File

@@ -12,7 +12,6 @@ import { db } from "../../../../pkg/db/db.js";
import {
type ForecastData,
forecastData,
forecastDataSchema,
} from "../../../../pkg/db/schema/forecastEDIData.js";
import { prodQuery } from "../../../../pkg/prodSql/prodQuery.js";
import { activeArticle } from "../../../../pkg/prodSql/querys/datamart/article.js";
@@ -36,19 +35,21 @@ export const forecastEdiData = async (data: ForecastData[]) => {
for (let i = 0; i < data.length; i++) {
const activeAV = article?.data.filter(
(c: any) =>
c?.CustomerArticleNumber === data[i].customerArticleNo?.toString(),
c?.customerArticleNumber === data[i].customerArticleNo?.toString(),
);
const newData = data[i];
//console.log(activeAV[0].IdArtikelvarianten);
forecaseEDIDATA.push({
...newData,
article: activeAV[0].IdArtikelvarianten,
article: activeAV.length > 0 ? activeAV[0].article : 0,
description:
activeAV.length > 0 ? activeAV[0].description : "No Av Created",
requirementDate: new Date(newData.requirementDate),
});
}
console.log(forecaseEDIDATA[0]);
//console.log(forecaseEDIDATA[0]);
const { data: f, error: ef } = await tryCatch(
db.insert(forecastData).values(forecaseEDIDATA),
);

View File

@@ -5,6 +5,7 @@ import { setupForkliftRoutes } from "../forklifts/routes/routes.js";
import { setupLogisticsRoutes } from "../logistics/routes.js";
import { setupSystemRoutes } from "../system/routes.js";
import { setupMobileRoutes } from "../mobile/route.js";
import { setupDataMartRoutes } from "../datamart/routes/routes.js";
export const setupRoutes = (app: Express, basePath: string) => {
// all routes
@@ -14,6 +15,7 @@ export const setupRoutes = (app: Express, basePath: string) => {
setupLogisticsRoutes(app, basePath);
setupForkliftRoutes(app, basePath);
setupMobileRoutes(app, basePath);
setupDataMartRoutes(app, basePath)
// always try to go to the app weather we are in dev or in production.
app.get(basePath + "/", (req: Request, res: Response) => {

View File

@@ -0,0 +1,59 @@
export const swaggerUiOptions = {
explorer: true,
customCss: ".swagger-ui .topbar { display: none }",
customSiteTitle: "LST API Documentation",
swaggerOptions: {
persistAuthorization: true,
displayRequestDuration: true,
filter: true,
syntaxHighlight: {
activate: true,
theme: "monokai",
},
},
};
export const swaggerConfig = {
definition: {
openapi: "3.0.0",
info: {
title: "Logistics Support Tool",
version: "1.8.0",
description: "Complete API documentation for lst",
contact: {
name: "API Support",
email: "blake.matthes@alpla.com",
},
},
servers: [
{
url: "http://localhost:4200",
description: "Development server",
},
{
url: "https://api.yourapp.com",
description: "Production server",
},
],
components: {
securitySchemes: {
bearerAuth: {
type: "http",
scheme: "bearer",
bearerFormat: "JWT",
},
apiKey: {
type: "apiKey",
in: "header",
name: "X-API-Key",
},
},
},
security: [
{
bearerAuth: [],
},
],
},
apis: [], // We'll populate this dynamically
};

View File

@@ -0,0 +1,129 @@
const loginEndpoint = {
'/lst/api/user/login': {
post: {
tags: ['Authentication'],
summary: 'Login to get a token',
description: 'User enters username and password, gets back a JWT token and session data',
// What the user sends you
requestBody: {
required: true,
content: {
'application/json': {
schema: {
type: 'object',
required: ['username', 'password'],
properties: {
username: {
type: 'string',
example: 'smith01'
},
password: {
type: 'string',
example: 'MyPassword123'
}
}
}
}
}
},
// What you send back to the user
responses: {
// SUCCESS - Login worked
200: {
description: 'Login successful',
content: {
'application/json': {
schema: {
type: 'object',
properties: {
success: {
type: 'boolean',
example: true
},
message: {
type: 'string',
example: 'Login successful'
},
data: {
type: 'object',
properties: {
token: {
type: 'string',
example: 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...'
},
user: {
type: 'object',
properties: {
id: {
type: 'string',
example: '12345'
},
email: {
type: 'string',
example: 'user@example.com'
},
username: {
type: 'string',
example: 'johndoe'
}
}
}
}
}
}
}
}
}
},
// ERROR - Wrong password or email
401: {
description: 'Wrong email or password',
content: {
'application/json': {
schema: {
type: 'object',
properties: {
success: {
type: 'boolean',
example: false
},
message: {
type: 'string',
example: 'Invalid credentials'
}
}
}
}
}
},
// ERROR - Missing fields
400: {
description: 'Missing email or password',
content: {
'application/json': {
schema: {
type: 'object',
properties: {
success: {
type: 'boolean',
example: false
},
message: {
type: 'string',
example: 'Email and password are required'
}
}
}
}
}
}
}
}
}
};
export default loginEndpoint;

View File

@@ -0,0 +1,31 @@
import swaggerJsdoc from 'swagger-jsdoc';
import swaggerUi from 'swagger-ui-express';
import { swaggerConfig, swaggerUiOptions } from './config.js';
import { type Express } from 'express';
import loginEndpoint from './endpoints/auth/login.js';
const allPaths = {
...loginEndpoint,
// When you add more endpoints, add them here:
// ...registerEndpoint,
// ...logoutEndpoint,
};
const swaggerSpec = {
...swaggerConfig.definition,
paths: allPaths
};
const specs = swaggerJsdoc({
...swaggerConfig,
definition: swaggerSpec
});
export function setupSwagger(app: Express, basePath: string): void {
// Swagger UI at /api-docs
app.use(basePath + "/api/docs", swaggerUi.serve, swaggerUi.setup(specs, swaggerUiOptions));
//console.log('📚 Swagger docs at http://localhost:3000/api-docs');
}
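The allPaths spread above is where additional endpoint specs get merged in. As a hypothetical illustration of that pattern (not part of this change set), a second endpoint file could follow the same shape as loginEndpoint and then be spread into allPaths:

```ts
// Hypothetical example of a second endpoint file (e.g. endpoints/auth/register.ts),
// written in the same shape as loginEndpoint; it is not part of this change set.
const registerEndpoint = {
  "/lst/api/user/register": {
    post: {
      tags: ["Authentication"],
      summary: "Create a new user",
      requestBody: {
        required: true,
        content: {
          "application/json": {
            schema: {
              type: "object",
              required: ["username", "password"],
              properties: {
                username: { type: "string", example: "smith01" },
                password: { type: "string", example: "MyPassword123" },
              },
            },
          },
        },
      },
      responses: {
        200: { description: "User created" },
      },
    },
  },
};

export default registerEndpoint;

// It would then be merged into allPaths in swagger.ts:
// const allPaths = { ...loginEndpoint, ...registerEndpoint };
```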

View File

@@ -5,66 +5,31 @@ import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { settings } from "../../../../pkg/db/schema/settings.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();
router.patch("/:token", async (req: Request, res: Response) => {
const log = createLogger({ module: "admin", subModule: "update server" });
router.patch("/:id", async (req: Request, res: Response) => {
const log = createLogger({ module: "admin", subModule: "update setting" });
// when a server is updated and is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there, we want to insert with update on conflict.
const token = req.params.token;
const id = req.params.id;
const updates: Record<string, any> = {};
if (req.body?.name !== undefined) {
updates.name = req.body.name;
}
if (req.body?.serverDNS !== undefined) {
updates.serverDNS = req.body.serverDNS;
if (req.body?.value !== undefined) {
updates.value = req.body.value;
}
if (req.body?.ipAddress !== undefined) {
updates.ipAddress = req.body.ipAddress;
if (req.body?.description !== undefined) {
updates.description = req.body.description;
}
if (req.body?.greatPlainsPlantCode !== undefined) {
updates.greatPlainsPlantCode = req.body.greatPlainsPlantCode;
}
if (req.body?.lstServerPort !== undefined) {
updates.lstServerPort = req.body.lstServerPort;
}
if (req.body?.serverLoc !== undefined) {
updates.serverLoc = req.body.serverLoc;
}
if (req.body?.streetAddress !== undefined) {
updates.streetAddress = req.body.streetAddress;
}
if (req.body?.cityState !== undefined) {
updates.cityState = req.body.cityState;
}
if (req.body?.zipcode !== undefined) {
updates.zipcode = req.body.zipcode;
}
if (req.body?.contactEmail !== undefined) {
updates.contactEmail = req.body.contactEmail;
}
if (req.body?.contactPhone !== undefined) {
updates.contactPhone = req.body.contactPhone;
}
if (req.body?.customerTiAcc !== undefined) {
updates.customerTiAcc = req.body.customerTiAcc;
}
if (req.body?.active !== undefined) {
updates.active = req.body.active;
if (req.body?.moduleName !== undefined) {
updates.moduleName = req.body.moduleName;
}
updates.upd_user = req.user!.username || "lst_user";
@@ -73,65 +38,12 @@ router.patch("/:token", async (req: Request, res: Response) => {
try {
if (Object.keys(updates).length > 0) {
await db
.update(serverData)
.update(settings)
.set(updates)
.where(eq(serverData.plantToken, token));
.where(eq(settings.settings_id, id));
}
if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
log.info({}, "Running in dev server about to add in a new server");
const axiosInstance = axios.create({
httpsAgent: new https.Agent({ rejectUnauthorized: false }),
baseURL: process.env.MAIN_SERVER,
withCredentials: true,
});
const loginRes = (await axiosInstance.post(
`${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
{
username: process.env.MAIN_SERVER_USERNAME,
password: process.env.MAIN_SERVER_PASSWORD,
},
{
headers: { "Content-Type": "application/json" },
},
)) as any;
const setCookie = loginRes?.headers["set-cookie"][0];
//console.log(setCookie.split(";")[0].replace("__Secure-", ""));
if (!setCookie) {
throw new Error("Did not receive a Set-Cookie header from login");
}
const { data, error } = await tryCatch(
axios.patch(
`${process.env.MAIN_SERVER}/lst/api/admin/server/${token}`,
updates,
{
headers: {
"Content-Type": "application/json",
Cookie: setCookie.split(";")[0],
},
withCredentials: true,
},
),
);
if (error) {
console.log(error);
log.error(
{ stack: error },
"There was an error adding the server to Main Server",
);
}
log.info(
{ stack: data?.data },
"A new Server was just added to the server.",
);
}
res.status(200).json({ message: `${token} Server was just updated` });
res.status(200).json({ message: `Setting was just updated` });
} catch (error) {
console.log(error);
res.status(400).json({ message: "Error Server updated", error });

View File

@@ -58,6 +58,8 @@ router.get("/", async (req, res) => {
memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
used.rss / 1024 / 1024
).toFixed(2)} MB`,
eomFGPkgSheetVersion: 1, // this is the excel file version when we have a change to the macro we want to grab this
masterMacroFile: 1, // this is the excel file version when we have a change to the macro we want to grab this
});
});

View File

@@ -65,8 +65,6 @@ export const addListeners = async () => {
// all the migration stuff that will need to be moved later build 230 and above will need to remove
export const manualFixes = async () => {
const fixQuery = `ALTER TABLE "serverData" ADD CONSTRAINT "serverData_name_unique" UNIQUE("name");`;
const log = createLogger({ module: "utils", subModule: "manual fixes" });
const client = new Client({
connectionString: `postgresql://${process.env.DATABASE_USER}:${process.env.DATABASE_PASSWORD}@${process.env.DATABASE_HOST}:${process.env.DATABASE_PORT}/${process.env.DATABASE_DB}`,
@@ -74,12 +72,16 @@ export const manualFixes = async () => {
await client.connect();
try {
log.info({}, "Running the manual fix");
await client.query(fixQuery);
} catch (e) {
log.info({ error: e }, "Fix was not completed");
}
/**
* The fix to correct the constraint on the server data
*/
// const fixQuery = `ALTER TABLE "serverData" ADD CONSTRAINT "serverData_name_unique" UNIQUE("name");`;
// try {
// log.info({}, "Running the manual fix");
// await client.query(fixQuery);
// } catch (e) {
// log.info({ error: e }, "Fix was not completed");
// }
};
export const settingsMigrate = async () => {

View File

@@ -1,11 +0,0 @@
export const swaggerOptions = {
definition: {
openapi: "3.0.0",
info: {
title: "Logistics Support Tool",
version: "1.0.0",
},
},
// globs where swagger-jsdoc should look for annotations:
apis: ["../../src/**/*.ts"],
};

View File

@@ -18,6 +18,7 @@ export const forecastData = pgTable("forecast_Data", {
quantity: real("quantity"),
requirementDate: timestamp("requirement_date").notNull(),
article: integer("article"),
description: text("description"),
createdAt: timestamp("created_at").defaultNow(),
});

View File

@@ -1,6 +1,12 @@
import { returnFunc } from "../utils/return.js";
import { connected, pool } from "./prodSqlConnect.js";
import { validateEnv } from "../utils/envValidator.js";
import { returnFunc } from "../utils/return.js";
import {
closePool,
connected,
pool,
reconnecting,
reconnectToSql,
} from "./prodSqlConnect.js";
const env = validateEnv(process.env);
/**
@@ -11,48 +17,65 @@ const env = validateEnv(process.env);
* You must use test1 always as it will be changed via query
*/
export async function prodQuery(queryToRun: string, name: string) {
if (!connected) {
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `The sql ${env.PROD_PLANT_TOKEN} is not connected`,
notify: false,
data: [],
});
}
const query = queryToRun.replaceAll("test1", env.PROD_PLANT_TOKEN);
try {
const result = await pool.request().query(query);
return {
success: true,
message: `Query results for: ${name}`,
data: result.recordset,
};
} catch (error: any) {
console.log(error);
if (error.code === "ETIMEOUT") {
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `${name} did not run due to a timeout.`,
notify: false,
data: [error],
});
}
if (!connected) {
reconnectToSql();
if (error.code === "EREQUEST") {
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `${name} encountered an error ${error.originalError.info.message}`,
data: [],
});
}
}
if (reconnecting) {
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `The sql ${env.PROD_PLANT_TOKEN} is trying to reconnect already`,
notify: false,
data: [],
});
} else {
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `The sql ${env.PROD_PLANT_TOKEN} is not connected`,
notify: false,
data: [],
});
}
}
const query = queryToRun.replaceAll("test1", env.PROD_PLANT_TOKEN);
try {
const result = await pool.request().query(query);
return {
success: true,
message: `Query results for: ${name}`,
data: result.recordset,
};
} catch (error: any) {
console.log(error);
if (error.code === "ETIMEOUT") {
closePool();
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `${name} did not run due to a timeout.`,
notify: false,
data: [error],
});
}
if (error.code === "EREQUEST") {
closePool();
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `${name} encountered an error ${error.originalError.info.message}`,
data: [],
});
}
}
}
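For reference, a minimal usage sketch of prodQuery under the convention in the doc comment above: queries are written against the test1 token (for example the activeArticle query elsewhere in this change), and prodQuery substitutes env.PROD_PLANT_TOKEN before executing. The import paths and the success-path shape follow what this diff shows, but the snippet is illustrative, not part of the change.

```ts
// Illustrative only: call prodQuery with a query written against "test1";
// prodQuery swaps "test1" for env.PROD_PLANT_TOKEN before running it.
import { activeArticle } from "./querys/datamart/article.js";
import { prodQuery } from "./prodQuery.js";

const result = await prodQuery(activeArticle, "Get active articles");

if (result?.success) {
  console.log(`Fetched ${result.data.length} active articles`);
} else {
  console.log(result?.message); // error paths come back through returnFunc
}
```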

View File

@@ -1,136 +1,134 @@
import sql from "mssql";
import { checkHostnamePort } from "../utils/checkHostNamePort.js";
import { sqlConfig } from "./prodSqlConfig.js";
import { createLogger } from "../logger/logger.js";
import { returnFunc } from "../utils/return.js";
import { checkHostnamePort } from "../utils/checkHostNamePort.js";
import { validateEnv } from "../utils/envValidator.js";
import { returnFunc } from "../utils/return.js";
import { sqlConfig } from "./prodSqlConfig.js";
const env = validateEnv(process.env);
export let pool: any;
export let connected: boolean = false;
let reconnecting = false;
export let reconnecting = false;
export const initializeProdPool = async () => {
const log = createLogger({ module: "prodSql" });
const log = createLogger({ module: "prodSql" });
const serverUp = await checkHostnamePort(`${env.PROD_SERVER}:1433`);
const serverUp = await checkHostnamePort(`${env.PROD_SERVER}:1433`);
if (!serverUp) {
reconnectToSql();
return returnFunc({
success: false,
module: "prodSql",
level: "fatal",
message: `The sql ${env.PROD_SERVER} is not reachable`,
data: [],
});
}
if (!serverUp) {
reconnectToSql();
return returnFunc({
success: false,
module: "prodSql",
level: "fatal",
message: `The sql ${env.PROD_SERVER} is not reachable`,
data: [],
});
}
// if you were restarting from the endpoint you get this lovely error
if (connected) {
return returnFunc({
success: false,
module: "prodSql",
level: "error",
message: `There is already a connection to ${env.PROD_PLANT_TOKEN}`,
data: [],
});
}
try {
pool = await sql.connect(sqlConfig);
// if you were restarting from the endpoint you get this lovely error
if (connected) {
return returnFunc({
success: false,
module: "prodSql",
level: "error",
message: `There is already a connection to ${env.PROD_PLANT_TOKEN}`,
data: [],
});
}
try {
pool = await sql.connect(sqlConfig);
log.info(
`Connected to ${sqlConfig?.server}, using DB: ${sqlConfig?.database}`
);
connected = true;
} catch (error) {
log.fatal(
`${JSON.stringify(
error
)}, "There was an error connecting to the pool."`
);
reconnectToSql();
// throw new Error("There was an error closing the sql connection");
}
log.info(
`Connected to ${sqlConfig?.server}, using DB: ${sqlConfig?.database}`,
);
connected = true;
} catch (error) {
log.fatal(
`${JSON.stringify(error)}, "There was an error connecting to the pool."`,
);
reconnectToSql();
// throw new Error("There was an error closing the sql connection");
}
};
const reconnectToSql = async () => {
const log = createLogger({ module: "prodSql" });
if (reconnecting) return;
reconnecting = true;
export const reconnectToSql = async () => {
const log = createLogger({ module: "prodSql" });
if (reconnecting) return;
reconnecting = true;
let delay = 2000; // start at 2s
let attempts = 0;
const maxAttempts = 10; // or limit by time, e.g. 2 min total
let delay = 2000; // start at 2s
let attempts = 0;
const maxAttempts = 10; // or limit by time, e.g. 2 min total
while (!connected && attempts < maxAttempts) {
attempts++;
log.info(
`Reconnect attempt ${attempts}/${maxAttempts} in ${
delay / 1000
}s...`
);
while (!connected && attempts < maxAttempts) {
attempts++;
log.info(
`Reconnect attempt ${attempts}/${maxAttempts} in ${delay / 1000}s...`,
);
await new Promise((res) => setTimeout(res, delay));
await new Promise((res) => setTimeout(res, delay));
const serverUp = await checkHostnamePort(`${env.PROD_SERVER}:1433`);
const serverUp = await checkHostnamePort(`${env.PROD_SERVER}:1433`);
if (!serverUp) {
delay = Math.min(delay * 2, 30000); // exponential backoff up to 30s
continue;
}
if (!serverUp) {
delay = Math.min(delay * 2, 30000); // exponential backoff up to 30s
continue;
}
try {
pool = await sql.connect(sqlConfig);
try {
pool = await sql.connect(sqlConfig);
log.info(
`Connected to ${sqlConfig?.server}, and looking at ${sqlConfig?.database}`
);
reconnecting = false;
connected = true;
} catch (error) {
log.fatal(
`${JSON.stringify(
error
)}, "There was an error connecting to the pool."`
);
delay = Math.min(delay * 2, 30000); // exponential backoff up to 30s
// throw new Error("There was an error closing the sql connection");
}
}
log.info(
`Connected to ${sqlConfig?.server}, and looking at ${sqlConfig?.database}`,
);
reconnecting = false;
connected = true;
} catch (error) {
log.fatal(
`${JSON.stringify(
error,
)}, "There was an error connecting to the pool."`,
);
delay = Math.min(delay * 2, 30000); // exponential backoff up to 30s
// throw new Error("There was an error closing the sql connection");
}
}
if (!connected) {
log.fatal(
{ notify: true },
"Max reconnect attempts reached on the prodSql server. Stopping retries."
);
reconnecting = false;
// optional: exit process or alert someone here
// process.exit(1);
}
if (!connected) {
log.fatal(
{ notify: true },
"Max reconnect attempts reached on the prodSql server. Stopping retries.",
);
reconnecting = false;
// exit process or alert someone here
// process.exit(1);
}
};
export const closePool = async () => {
const log = createLogger({ module: "prodSql" });
if (!connected) {
log.error("There is no connection a connection.");
return { success: false, message: "There is already a connection." };
}
try {
await pool.close();
log.info("Connection pool closed");
connected = false;
return {
success: true,
message: "The sql server connection has been closed",
};
} catch (error) {
log.fatal(
{ notify: true },
`${JSON.stringify(
error
)}, "There was an error closing the sql connection"`
);
}
const log = createLogger({ module: "prodSql" });
if (!connected) {
log.error("There is no connection a connection.");
return { success: false, message: "There is already a connection." };
}
try {
await pool.close();
log.info("Connection pool closed");
connected = false;
return {
success: true,
message: "The sql server connection has been closed",
};
} catch (error) {
connected = false;
log.info(
//{ notify: true },
{ error: error },
`${JSON.stringify(
error,
)}, "There was an error closing the sql connection"`,
);
}
};

View File

@@ -1,12 +1,12 @@
export const activeArticle = `
use AlplaPROD_test1
SELECT V_Artikel.IdArtikelvarianten,
V_Artikel.Bezeichnung,
V_Artikel.ArtikelvariantenTypBez,
V_Artikel.PreisEinheitBez,
SELECT V_Artikel.IdArtikelvarianten as article,
V_Artikel.Bezeichnung as description,
V_Artikel.ArtikelvariantenTypBez as articleType,
V_Artikel.PreisEinheitBez as pricePoint,
case when sales.price is null then 0 else sales.price end as salesPrice,
TypeOfMaterial=CASE
CASE
WHEN
V_Artikel.ArtikelvariantenTypBez LIKE'%Additive'
Then 'AD'
@@ -90,14 +90,15 @@ THEN 'Caps'
When
V_Artikel.ArtikelvariantenTypBez = 'Dummy'
THEN 'Not used'
ELSE 'Item not defined' END
,V_Artikel.IdArtikelvariantenTyp,
Round(V_Artikel.ArtikelGewicht, 3) as Article_Weight,
IdAdresse,
AdressBez,
AdressTypBez,
ProdBereichBez,
FG=case when
ELSE 'Item not defined' END as typeOfMaterial
,V_Artikel.IdArtikelvariantenTyp as articleIdType,
Round(V_Artikel.ArtikelGewicht, 3) as articleWeight,
IdAdresse as idAddress,
AdressBez as addressDescription,
AdressTypBez as addressType,
ProdBereichBez as profitCenter,
case when
V_Artikel.ProdBereichBez = 'SBM' or
V_Artikel.ProdBereichBez = 'IM-Caps' or
V_Artikel.ProdBereichBez = 'IM-PET' or
@@ -107,15 +108,16 @@ V_Artikel.ProdBereichBez = 'ISBM' or
V_Artikel.ProdBereichBez = 'IM-Finishing'
Then 'FG'
Else 'not Defined Profit Center'
end,
end as fg,
V_Artikel.Umlaeufe as num_of_cycles,
V_FibuKonten_BASIS.FibuKontoNr as CostsCenterId,
V_FibuKonten_BASIS.Bezeichnung as CostCenterDescription,
sales.[KdArtNr] as CustomerArticleNumber,
sales.[KdArtBez] as CustomerArticleDescription,
round(V_Artikel.Zyklus, 2) as CycleTime,
V_FibuKonten_BASIS.FibuKontoNr as costsCenterId,
V_FibuKonten_BASIS.Bezeichnung as costCenterDescription,
sales.[KdArtNr] as customerArticleNumber,
sales.[KdArtBez] as customerArticleDescription,
round(V_Artikel.Zyklus, 2) as cycleTime,
Sypronummer as salesAgreement,
V_Artikel.ProdArtikelBez as ProductFamily
V_Artikel.ProdArtikelBez as productFamily
--,REPLACE(pur.UOM,'UOM:','')
,Case when LEFT(
LTRIM(REPLACE(pur.UOM,'UOM:','')),
@@ -123,7 +125,7 @@ V_Artikel.ProdArtikelBez as ProductFamily
) is null then '1' else LEFT(
LTRIM(REPLACE(pur.UOM,'UOM:','')),
CHARINDEX(' ', LTRIM(REPLACE(REPLACE(pur.UOM,'UOM:',''), CHAR(13)+CHAR(10), ' ')) + ' ') - 1
) end AS UOM
) end AS uom
--,*
FROM dbo.V_Artikel (nolock)

View File

@@ -1,123 +1,125 @@
import type { Address } from "nodemailer/lib/mailer/index.js";
import type { Transporter } from "nodemailer";
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
import type Mail from "nodemailer/lib/mailer/index.js";
import os from "os";
import nodemailer from "nodemailer";
import type Mail from "nodemailer/lib/mailer/index.js";
import type { Address } from "nodemailer/lib/mailer/index.js";
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
import hbs from "nodemailer-express-handlebars";
import os from "os";
import path from "path";
import { fileURLToPath } from "url";
import { promisify } from "util";
import hbs from "nodemailer-express-handlebars";
import { createLogger } from "../../logger/logger.js";
interface HandlebarsMailOptions extends Mail.Options {
template: string;
context: Record<string, unknown>;
template: string;
context: Record<string, unknown>;
}
interface EmailData {
email: string;
subject: string;
template: string;
context: Record<string, unknown>;
email: string;
subject: string;
template: string;
context: Record<string, unknown>;
}
export const sendEmail = async (data: EmailData): Promise<any> => {
const log = createLogger({ module: "pkg", subModule: "sendMail" });
let transporter: Transporter;
let fromEmail: string | Address;
const log = createLogger({ module: "pkg", subModule: "sendMail" });
let transporter: Transporter;
let fromEmail: string | Address;
if (
os.hostname().includes("OLP") &&
process.env.EMAIL_USER &&
process.env.EMAIL_PASSWORD
) {
transporter = nodemailer.createTransport({
service: "gmail",
auth: {
user: process.env.EMAIL_USER,
pass: process.env.EMAIL_PASSWORD,
},
//debug: true,
});
// if (
// os.hostname().includes("OLP") &&
// process.env.EMAIL_USER &&
// process.env.EMAIL_PASSWORD
// ) {
// transporter = nodemailer.createTransport({
// service: "gmail",
// auth: {
// user: process.env.EMAIL_USER,
// pass: process.env.EMAIL_PASSWORD,
// },
// //debug: true,
// });
// update the from email
fromEmail = process.env.EMAIL_USER;
} else {
// convert to the correct plant token.
// // update the from email
// fromEmail = process.env.EMAIL_USER;
// } else {
// // convert to the correct plant token.
let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`;
//let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`;
//const testServers = ["vms036", "VMS036"];
//const testServers = ["vms036", "VMS036"];
if (os.hostname().includes("VMS036")) {
host = "USMCD1-smtp.alpla.net";
}
// if (os.hostname().includes("VMS036")) {
// host = "USMCD1-smtp.alpla.net";
// }
// if (plantToken[0].value === "usiow2") {
// host = "USIOW1-smtp.alpla.net";
// }
// if (plantToken[0].value === "usiow2") {
// host = "USIOW1-smtp.alpla.net";
// }
transporter = nodemailer.createTransport({
host: host,
port: 25,
rejectUnauthorized: false,
//secure: false,
// auth: {
// user: "alplaprod",
// pass: "obelix",
// },
debug: true,
} as SMTPTransport.Options);
transporter = nodemailer.createTransport({
host: "smtp.azurecomm.net",
port: 587,
//rejectUnauthorized: false,
tls: {
minVersion: "TLSv1.2",
},
auth: {
user: "donotreply@mail.alpla.com",
pass: process.env.SMTP_PASSWORD,
},
debug: true,
} as SMTPTransport.Options);
// update the from email
fromEmail = `noreply@alpla.com`;
}
// update the from email
fromEmail = `DoNotReply@mail.alpla.com`;
//}
// creating the handlbar options
const viewPath = path.resolve(
path.dirname(fileURLToPath(import.meta.url)),
"./views/"
);
// creating the handlbar options
const viewPath = path.resolve(
path.dirname(fileURLToPath(import.meta.url)),
"./views/",
);
const handlebarOptions = {
viewEngine: {
extname: ".hbs",
//layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory
defaultLayout: "", // Specify the default layout
partialsDir: viewPath,
},
viewPath: viewPath,
extName: ".hbs", // File extension for Handlebars templates
};
const handlebarOptions = {
viewEngine: {
extname: ".hbs",
//layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory
defaultLayout: "", // Specify the default layout
partialsDir: viewPath,
},
viewPath: viewPath,
extName: ".hbs", // File extension for Handlebars templates
};
transporter.use("compile", hbs(handlebarOptions));
transporter.use("compile", hbs(handlebarOptions));
const mailOptions: HandlebarsMailOptions = {
from: fromEmail,
to: data.email,
subject: data.subject,
//text: "You will have a reset token here and only have 30min to click the link before it expires.",
//html: emailTemplate("BlakesTest", "This is an example with css"),
template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs')
context: data.context,
};
const mailOptions: HandlebarsMailOptions = {
from: fromEmail,
to: data.email,
subject: data.subject,
//text: "You will have a reset token here and only have 30min to click the link before it expires.",
//html: emailTemplate("BlakesTest", "This is an example with css"),
template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs')
context: data.context,
};
// now verify and send the email
const sendMailPromise = promisify(transporter.sendMail).bind(transporter);
// now verify and send the email
const sendMailPromise = promisify(transporter.sendMail).bind(transporter);
try {
// Send email and await the result
const info = await sendMailPromise(mailOptions);
log.info(null, `Email was sent to: ${data.email}`);
return { success: true, message: "Email sent.", data: info };
} catch (err) {
console.log(err);
log.error(
{ error: err },
try {
// Send email and await the result
const info = await sendMailPromise(mailOptions);
log.info(null, `Email was sent to: ${data.email}`);
return { success: true, message: "Email sent.", data: info };
} catch (err) {
console.log(err);
log.error(
{ error: err },
`Error sending Email to : ${data.email}`
);
return { success: false, message: "Error sending email.", error: err };
}
`Error sending Email to : ${data.email}`,
);
return { success: false, message: "Error sending email.", error: err };
}
};
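For reference, the new transport reduces to the minimal sketch below (SMTP_PASSWORD is assumed to be set; the recipient is a placeholder). sendMail also returns a promise when called without a callback, so the promisify wrapper above is belt-and-braces rather than required.

import nodemailer from "nodemailer";

// Minimal sketch of the Azure Communication Services relay configured above.
const transporter = nodemailer.createTransport({
  host: "smtp.azurecomm.net",
  port: 587,
  tls: { minVersion: "TLSv1.2" },
  auth: {
    user: "donotreply@mail.alpla.com",
    pass: process.env.SMTP_PASSWORD, // assumed to be present in the environment
  },
});

await transporter.sendMail({
  from: "DoNotReply@mail.alpla.com",
  to: "someone@example.com", // placeholder recipient
  subject: "SMTP connectivity test",
  text: "Plain-text test message, no Handlebars template involved.",
});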

View File

@@ -316,7 +316,7 @@ func runNPMInstall(rootDir string, folder string) error {
} else {
folderDir = filepath.Join(rootDir, folder)
}
cmd := exec.Command("npm", "install")
cmd := exec.Command("npm", "install", "--production")
cmd.Dir = folderDir
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr

File diff suppressed because it is too large

View File

@@ -11,31 +11,31 @@
},
"dependencies": {
"@dnd-kit/core": "^6.3.1",
"@radix-ui/react-avatar": "^1.1.10",
"@radix-ui/react-avatar": "^1.1.11",
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-collapsible": "^1.1.12",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.16",
"@radix-ui/react-label": "^2.1.7",
"@radix-ui/react-label": "^2.1.8",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-scroll-area": "^1.2.10",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-separator": "^1.1.8",
"@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-switch": "^1.2.6",
"@radix-ui/react-tabs": "^1.1.13",
"@radix-ui/react-tooltip": "^1.2.8",
"@react-pdf/renderer": "^4.3.1",
"@tailwindcss/vite": "^4.1.13",
"@tanstack/react-form": "^1.23.0",
"@tanstack/react-query": "^5.89.0",
"@tanstack/react-query-devtools": "^5.90.2",
"@tanstack/react-router": "^1.131.36",
"@tanstack/react-router-devtools": "^1.131.36",
"@tailwindcss/vite": "^4.1.17",
"@tanstack/react-form": "^1.26.0",
"@tanstack/react-query": "^5.90.11",
"@tanstack/react-query-devtools": "^5.91.1",
"@tanstack/react-router": "^1.139.6",
"@tanstack/react-router-devtools": "^1.139.6",
"@tanstack/react-table": "^8.21.3",
"@types/react-calendar-timeline": "^0.28.6",
"axios": "^1.12.2",
"better-auth": "^1.3.11",
"axios": "^1.13.2",
"better-auth": "^1.4.2",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
@@ -43,39 +43,38 @@
"is-mobile": "^5.0.0",
"js-cookie": "^3.0.5",
"jsbarcode": "^3.12.1",
"lucide-react": "^0.542.0",
"marked": "^16.4.1",
"lucide-react": "^0.554.0",
"marked": "^17.0.1",
"moment": "^2.30.1",
"r": "^0.0.5",
"react": "^19.1.1",
"react": "^19.2.0",
"react-barcode": "^1.6.1",
"react-calendar-timeline": "^0.30.0-beta.3",
"react-day-picker": "^9.11.1",
"react-dom": "^19.1.1",
"react-hook-form": "^7.65.0",
"react-calendar-timeline": "^0.30.0-beta.4",
"react-day-picker": "^9.11.2",
"react-dom": "^19.2.0",
"react-hook-form": "^7.66.1",
"react-resizable-panels": "^3.0.6",
"recharts": "^2.15.4",
"socket.io-client": "^4.8.1",
"sonner": "^2.0.7",
"tailwind-merge": "^3.3.1",
"tailwindcss": "^4.1.13",
"tailwind-merge": "^3.4.0",
"tailwindcss": "^4.1.17",
"zustand": "^5.0.8"
},
"devDependencies": {
"@eslint/js": "^9.33.0",
"@tanstack/router-plugin": "^1.131.36",
"@eslint/js": "^9.39.1",
"@tanstack/router-plugin": "^1.139.6",
"@types/js-cookie": "^3.0.6",
"@types/node": "^24.3.1",
"@types/react": "^19.1.10",
"@types/react-dom": "^19.1.7",
"@vitejs/plugin-react-swc": "^4.0.0",
"eslint": "^9.33.0",
"eslint-plugin-react-hooks": "^5.2.0",
"eslint-plugin-react-refresh": "^0.4.20",
"globals": "^16.3.0",
"tw-animate-css": "^1.3.8",
"typescript": "~5.8.3",
"typescript-eslint": "^8.39.1",
"vite": "^7.1.2"
"@types/node": "^24.10.1",
"@types/react": "^19.2.7",
"@types/react-dom": "^19.2.3",
"@vitejs/plugin-react-swc": "^4.2.2",
"eslint": "^9.39.1",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.24",
"globals": "^16.5.0",
"tw-animate-css": "^1.4.0",
"typescript": "~5.9.3",
"typescript-eslint": "^8.48.0",
"vite": "^7.2.4"
}
}

View File

@@ -15,7 +15,7 @@ export default function Admin() {
const items: Items[] = [
{
title: "Users",
url: "/lst/app/admin/users",
url: "/admin/users",
icon: User,
role: ["systemAdmin", "admin"],
module: "admin",
@@ -23,7 +23,7 @@ export default function Admin() {
},
{
title: "System",
url: "/lst/app/admin/settings",
url: "/admin/settings",
icon: Settings,
role: ["systemAdmin", "admin"],
module: "admin",
@@ -31,7 +31,7 @@ export default function Admin() {
},
{
title: "Modules",
url: "/lst/app/admin/modules",
url: "/admin/modules",
icon: Settings,
role: ["systemAdmin", "admin"],
module: "admin",
@@ -39,7 +39,7 @@ export default function Admin() {
},
{
title: "Servers",
url: "/lst/app/admin/servers",
url: "/admin/servers",
icon: Server,
role: ["systemAdmin", "admin"],
module: "admin",

View File

@@ -21,7 +21,7 @@ export default function ForkliftSideBar() {
const items: Items[] = [
{
title: "Lease Companies",
url: "/lst/app/forklifts/companies",
url: "/forklifts/companies",
icon: Building2,
role: ["systemAdmin", "admin"],
module: "forklifts",
@@ -29,7 +29,7 @@ export default function ForkliftSideBar() {
},
{
title: "Leases",
url: "/lst/app/forklifts/leases",
url: "/forklifts/leases",
icon: ReceiptText,
role: ["systemAdmin", "admin"],
module: "forklifts",
@@ -37,7 +37,7 @@ export default function ForkliftSideBar() {
},
{
title: "Invoices",
url: "/lst/app/forklifts/invoices",
url: "/forklifts/invoices",
icon: ReceiptText,
role: ["systemAdmin", "admin", "manager"],
module: "forklifts",
@@ -45,7 +45,7 @@ export default function ForkliftSideBar() {
},
{
title: "Repairs",
url: "/lst/app/admin/settings",
url: "/admin/settings",
icon: Wrench,
role: ["systemAdmin", "admin", "manager"],
module: "forklifts",
@@ -53,7 +53,7 @@ export default function ForkliftSideBar() {
},
{
title: "Hours",
url: "/lst/app/admin/settings",
url: "/admin/settings",
icon: Hourglass,
role: ["systemAdmin", "admin", "manager", "supervisor"],
module: "forklifts",
@@ -61,7 +61,7 @@ export default function ForkliftSideBar() {
},
{
title: "Forklifts",
url: "/lst/app/forklifts/forklifts",
url: "/forklifts/forklifts",
icon: Forklift,
role: ["systemAdmin", "admin", "manager", "supervisor"],
module: "forklifts",

View File

@@ -0,0 +1,18 @@
import { keepPreviousData, queryOptions } from "@tanstack/react-query";
import axios from "axios";
export function getSettings() {
return queryOptions({
queryKey: ["getSettings"],
queryFn: () => fetchSession(),
staleTime: 5000,
refetchOnWindowFocus: true,
placeholderData: keepPreviousData,
});
}
const fetchSession = async () => {
const { data } = await axios.get("/lst/api/system/settings");
return data.data;
};
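A usage sketch for the new query options (the component name is made up); spreading the shared options keeps the query key and stale time defined in one place:

import { useQuery } from "@tanstack/react-query";
import { getSettings } from "@/lib/querys/admin/getSettings";

export function SettingsCount() {
  const { data, isLoading } = useQuery(getSettings());
  if (isLoading) return <span>Loading settings</span>;
  return <span>{data?.length ?? 0} settings loaded</span>;
}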

View File

@@ -0,0 +1,26 @@
import { createColumnHelper } from "@tanstack/react-table";
import { ArrowDown, ArrowUp } from "lucide-react";
import { Button } from "@/components/ui/button";
export const GenericColumn = ({ columnName }: { columnName: string }) => {
const columnHelper = createColumnHelper();
return columnHelper.accessor(`${columnName}`, {
header: ({ column }) => {
return (
<Button
variant="ghost"
onClick={() => column.toggleSorting(column.getIsSorted() === "asc")}
>
<span className="flex flex-row gap-2">{`${columnName.toUpperCase()}`}</span>
{column.getIsSorted() === "asc" ? (
<ArrowUp className="ml-2 h-4 w-4" />
) : (
<ArrowDown className="ml-2 h-4 w-4" />
)}
</Button>
);
},
cell: (i) => i.getValue(),
});
};
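Quick usage sketch (the row shape and the GenericColumn import path are assumptions); since the accessor is untyped it works for any flat object rows:

import TableNoExpand from "@/lib/tableStuff/TableNoExpand";
import { GenericColumn } from "@/lib/tableStuff/GenericColumn"; // path assumed

const rows = [
  { reader: "Dock 1", ratio: 98 },
  { reader: "Dock 2", ratio: 91 },
];

const columns = [
  GenericColumn({ columnName: "reader" }),
  GenericColumn({ columnName: "ratio" }),
];

export default function ReaderRatios() {
  return <TableNoExpand data={rows} columns={columns} />;
}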

View File

@@ -1,6 +1,7 @@
import {
flexRender,
getCoreRowModel,
getFilteredRowModel,
getPaginationRowModel,
getSortedRowModel,
type SortingState,
@@ -26,6 +27,9 @@ export default function TableNoExpand({
columns: any;
}) {
const [sorting, setSorting] = useState<SortingState>([]);
// const [columnFilters, setColumnFilters] = React.useState<ColumnFiltersState>(
// []
// )
const table = useReactTable({
data,
columns,
@@ -33,11 +37,14 @@ export default function TableNoExpand({
getPaginationRowModel: getPaginationRowModel(),
onSortingChange: setSorting,
getSortedRowModel: getSortedRowModel(),
getFilteredRowModel: getFilteredRowModel(),
//renderSubComponent: ({ row }: { row: any }) => <ExpandedRow row={row} />,
//getRowCanExpand: () => true,
filterFns: {},
state: {
sorting,
//columnFilters
},
});
return (

View File

@@ -1,7 +1,31 @@
import { createFileRoute, Link, Outlet } from "@tanstack/react-router";
import {
createFileRoute,
Link,
Outlet,
redirect,
} from "@tanstack/react-router";
import { checkUserAccess } from "@/lib/authClient";
export const Route = createFileRoute("/_app/_adminLayout/admin/_system")({
component: RouteComponent,
beforeLoad: async () => {
const auth = await checkUserAccess({
allowedRoles: ["systemAdmin", "admin"],
moduleName: "system", // optional
});
if (!auth) {
throw redirect({
to: "/login",
search: {
// Use the current location to power a redirect after login
// (Do not use `router.state.resolvedLocation` as it can
// potentially lag behind the actual current location)
redirect: location.pathname + location.search,
},
});
}
},
});
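Since every admin route now needs this same beforeLoad block, a shared guard could keep them consistent. A hypothetical sketch, not part of the commit:

import { redirect } from "@tanstack/react-router";
import { checkUserAccess } from "@/lib/authClient";

// Throws the same login redirect used above when the user lacks the required roles.
export async function requireAccess(allowedRoles: string[], moduleName?: string) {
  const auth = await checkUserAccess({ allowedRoles, moduleName });
  if (!auth) {
    throw redirect({
      to: "/login",
      search: { redirect: location.pathname + location.search },
    });
  }
}

Each route's hook would then collapse to beforeLoad: () => requireAccess(["systemAdmin", "admin"], "system").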
function RouteComponent() {

View File

@@ -1,11 +1,230 @@
import { createFileRoute } from '@tanstack/react-router'
import { useMutation, useQuery } from "@tanstack/react-query";
import { createFileRoute } from "@tanstack/react-router";
import { createColumnHelper } from "@tanstack/react-table";
import axios from "axios";
import { ArrowDown, ArrowUp } from "lucide-react";
import { useEffect, useRef, useState } from "react";
import { toast } from "sonner";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { getSettings } from "@/lib/querys/admin/getSettings";
import TableNoExpand from "@/lib/tableStuff/TableNoExpand";
type Settings = {
settings_id: string;
name: string;
active: boolean;
value: string;
description: string;
moduleName: string;
roles: string[];
};
const updateSettings = async (
id: string,
data: Record<string, string | number | boolean | null>,
) => {
console.log(id, data);
try {
const res = await axios.patch(`/lst/api/system/settings/${id}`, data, {
withCredentials: true,
});
toast.success(`Setting just updated`);
return res;
} catch (err) {
toast.error("Error in updating the settings");
return err;
}
};
export const Route = createFileRoute(
'/_app/_adminLayout/admin/_system/settings',
"/_app/_adminLayout/admin/_system/settings",
)({
component: RouteComponent,
})
component: RouteComponent,
});
function RouteComponent() {
return <div>Hello "/_app/_adminLayout/admin/_system/settings"!</div>
const { data, isLoading, refetch } = useQuery(getSettings());
const columnHelper = createColumnHelper<Settings>();
const submitting = useRef(false);
const updateSetting = useMutation({
mutationFn: ({
id,
field,
value,
}: {
id: string;
field: string;
value: string | number | boolean | null;
}) => updateSettings(id, { [field]: value }),
onSuccess: () => {
// refetch or update cache
refetch();
},
});
const columns = [
columnHelper.accessor("name", {
header: ({ column }) => {
return (
<Button
variant="ghost"
onClick={() => column.toggleSorting(column.getIsSorted() === "asc")}
>
<span className="flex flex-row gap-2">Name</span>
{column.getIsSorted() === "asc" ? (
<ArrowUp className="ml-2 h-4 w-4" />
) : (
<ArrowDown className="ml-2 h-4 w-4" />
)}
</Button>
);
},
cell: (i) => i.getValue(),
}),
columnHelper.accessor("description", {
header: ({ column }) => {
return (
<Button
variant="ghost"
onClick={() => column.toggleSorting(column.getIsSorted() === "asc")}
>
<span className="flex flex-row gap-2">Description</span>
{column.getIsSorted() === "asc" ? (
<ArrowUp className="ml-2 h-4 w-4" />
) : (
<ArrowDown className="ml-2 h-4 w-4" />
)}
</Button>
);
},
cell: (i) => i.getValue(),
}),
columnHelper.accessor("value", {
header: ({ column }) => {
return (
<Button
variant="ghost"
onClick={() => column.toggleSorting(column.getIsSorted() === "asc")}
>
<span className="flex flex-row gap-2">Value</span>
{column.getIsSorted() === "asc" ? (
<ArrowUp className="ml-2 h-4 w-4" />
) : (
<ArrowDown className="ml-2 h-4 w-4" />
)}
</Button>
);
},
cell: ({ row, getValue }) => {
const initialValue = String(getValue() ?? "");
const [localValue, setLocalValue] = useState(initialValue);
const id = row.original.settings_id;
const field = "value";
useEffect(() => setLocalValue(initialValue), [initialValue]);
const handleSubmit = (newValue: string) => {
if (newValue !== initialValue) {
setLocalValue(newValue);
updateSetting.mutate({ id, field, value: newValue });
}
};
return (
<Input
value={localValue}
onChange={(e) => setLocalValue(e.currentTarget.value)}
onBlur={(e) => {
if (!submitting.current) {
submitting.current = true;
handleSubmit(e.currentTarget.value.trim());
setTimeout(() => (submitting.current = false), 100); // reset after slight delay
}
}}
onKeyDown={(e) => {
if (e.key === "Enter") {
e.preventDefault();
submitting.current = true;
handleSubmit(e.currentTarget.value.trim());
e.currentTarget.blur(); // will trigger blur, but we ignore it
setTimeout(() => (submitting.current = false), 100);
}
}}
/>
);
},
}),
columnHelper.accessor("moduleName", {
header: ({ column }) => {
return (
<Button
variant="ghost"
onClick={() => column.toggleSorting(column.getIsSorted() === "asc")}
>
<span className="flex flex-row gap-2">Module Name</span>
{column.getIsSorted() === "asc" ? (
<ArrowUp className="ml-2 h-4 w-4" />
) : (
<ArrowDown className="ml-2 h-4 w-4" />
)}
</Button>
);
},
cell: ({ row, getValue }) => {
const initialValue = String(getValue() ?? "");
const [localValue, setLocalValue] = useState(initialValue);
const id = row.original.settings_id;
const field = "moduleName";
useEffect(() => setLocalValue(initialValue), [initialValue]);
const handleSubmit = (newValue: string) => {
if (newValue !== initialValue) {
setLocalValue(newValue);
updateSetting.mutate({ id, field, value: newValue });
}
};
return (
<Input
value={localValue}
onChange={(e) => setLocalValue(e.currentTarget.value)}
onBlur={(e) => {
if (!submitting.current) {
submitting.current = true;
handleSubmit(e.currentTarget.value.trim());
setTimeout(() => (submitting.current = false), 100); // reset after slight delay
}
}}
onKeyDown={(e) => {
if (e.key === "Enter") {
e.preventDefault();
submitting.current = true;
handleSubmit(e.currentTarget.value.trim());
e.currentTarget.blur(); // will trigger blur, but we ignore it
setTimeout(() => (submitting.current = false), 100);
}
}}
/>
);
},
}),
];
if (isLoading)
return (
<div>
<span>Loading settings data</span>
</div>
);
return (
<div className="m-2">
<TableNoExpand data={data} columns={columns} />
</div>
);
}
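The value and moduleName cells above repeat the same blur/Enter submit logic; a small extracted component could carry it. This is a sketch only (EditableCell does not exist in the commit):

import { useEffect, useRef, useState } from "react";
import { Input } from "@/components/ui/input";

export function EditableCell({
  initialValue,
  onSubmit,
}: {
  initialValue: string;
  onSubmit: (value: string) => void;
}) {
  const [value, setValue] = useState(initialValue);
  const submitted = useRef(false);

  // Keep local state in sync when the row refetches.
  useEffect(() => setValue(initialValue), [initialValue]);

  const commit = (raw: string) => {
    const next = raw.trim();
    if (!submitted.current && next !== initialValue) {
      submitted.current = true;
      onSubmit(next);
      // release the guard after the blur that follows Enter has fired
      setTimeout(() => (submitted.current = false), 100);
    }
  };

  return (
    <Input
      value={value}
      onChange={(e) => setValue(e.currentTarget.value)}
      onBlur={(e) => commit(e.currentTarget.value)}
      onKeyDown={(e) => {
        if (e.key === "Enter") {
          e.preventDefault();
          commit(e.currentTarget.value);
          e.currentTarget.blur();
        }
      }}
    />
  );
}

Each column's cell would then hand EditableCell the row id, the field name, and the updateSetting mutation.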

View File

@@ -3,72 +3,60 @@ import ForecastImport from "./ForecastImport";
import OrderImport from "./OrderImport";
export default function DMButtons() {
const { settings } = useSettingStore();
const testServers = ["test1", "test2", "test3"];
const plantToken = settings.filter((n) => n.name === "plantToken");
const { settings } = useSettingStore();
const testServers = ["test1", "test2", "test3"];
const plantToken = settings.filter((n) => n.name === "plantToken");
//console.log(plantToken);
return (
<div className="flex flex-row-reverse gap-1">
<OrderImport fileType={"macro"} name={"Macro Import"} />
{/* dev and testserver sees all */}
{testServers.includes(plantToken[0]?.value) && (
<div className="flex flex-row gap-2">
<OrderImport
fileType={"abbott"}
name={"Abbott truck list"}
/>
<OrderImport
fileType={"energizer"}
name={"Energizer Truck List"}
/>
<ForecastImport fileType={"loreal"} name={"VMI Import"} />
<ForecastImport fileType={"pg"} name={"P&G"} />
<ForecastImport
fileType={"energizer"}
name={"Energizer Forecast"}
/>
</div>
)}
{plantToken[0]?.value === "usday1" && (
<div className="flex flex-row gap-2">
<OrderImport
fileType={"abbott"}
name={"Abbott truck list"}
/>
<OrderImport
fileType={"energizer"}
name={"Energizer Truck List"}
/>
<ForecastImport
fileType={"energizer"}
name={"Energizer Forecast"}
/>
</div>
)}
{plantToken[0]?.value === "usflo1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"loreal"} name={"VMI Import"} />
</div>
)}
{plantToken[0]?.value === "usstp1" && (
<div className="flex flex-row gap-2"></div>
)}
{plantToken[0]?.value === "usiow1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
{plantToken[0]?.value === "usiow2" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
{plantToken[0]?.value === "usksc1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
</div>
);
//console.log(plantToken);
return (
<div className="flex flex-row-reverse gap-1">
<OrderImport fileType={"macro"} name={"Macro Import"} />
{/* dev and testserver sees all */}
{testServers.includes(plantToken[0]?.value) && (
<div className="flex flex-row gap-2">
<OrderImport fileType={"abbott"} name={"Abbott truck list"} />
<OrderImport fileType={"energizer"} name={"Energizer Truck List"} />
<OrderImport fileType={"scj"} name={"SCJ Orders"} />
<ForecastImport fileType={"loreal"} name={"VMI Import"} />
<ForecastImport fileType={"pg"} name={"P&G"} />
<ForecastImport fileType={"energizer"} name={"Energizer Forecast"} />
</div>
)}
{plantToken[0]?.value === "usday1" && (
<div className="flex flex-row gap-2">
<OrderImport fileType={"abbott"} name={"Abbott truck list"} />
<OrderImport fileType={"energizer"} name={"Energizer Truck List"} />
<ForecastImport fileType={"energizer"} name={"Energizer Forecast"} />
</div>
)}
{plantToken[0]?.value === "usflo1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"loreal"} name={"VMI Import"} />
</div>
)}
{plantToken[0]?.value === "usstp1" && (
<div className="flex flex-row gap-2"></div>
)}
{plantToken[0]?.value === "usiow1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
{plantToken[0]?.value === "usiow2" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
{plantToken[0]?.value === "usksc1" && (
<div className="flex flex-row gap-2">
<ForecastImport fileType={"pg"} name={"P&G"} />
</div>
)}
{plantToken[0]?.value === "usweb1" && (
<div className="flex flex-row gap-2">
<OrderImport fileType={"scj"} name={"SCJ Orders"} />
</div>
)}
</div>
);
}

View File

@@ -12,14 +12,15 @@ import { LstCard } from "../../../extendedUi/LstCard";
export default function Relocate() {
const [bookingIn, setBookingIn] = useState(false);
const form = useForm({
defaultValues: { runningNr: " ", lane: "" },
defaultValues: { runningNr: " ", laneID: "" },
onSubmit: async ({ value }) => {
// Do something with form data
setBookingIn(true);
try {
const res = await axios.post("/lst/old/api/ocp/bookin", {
const res = await axios.post("/lst/old/api/logistics/relocate", {
runningNr: parseInt(value.runningNr),
laneID: parseInt(value.laneID),
});
if (res.data.success) {
@@ -27,15 +28,15 @@ export default function Relocate() {
form.reset();
setBookingIn(false);
} else {
console.log(res.data.data.errors);
toast.error(res.data.data.errors[0]?.message);
form.reset();
console.log(res.data.message);
toast.error(res.data.message);
//form.reset();
setBookingIn(false);
}
} catch (error) {
console.log(error);
toast.error(
"There was an error booking in pallet please validate you entered the correct info and try again.",
"There was an error relocating the pallet please validate the data.",
);
setBookingIn(false);
}
@@ -58,7 +59,7 @@ export default function Relocate() {
validators={{
// We can choose between form-wide and field-specific validators
onChange: ({ value }) =>
value.length > 2
value.length > 0
? undefined
: "Please enter a valid running number",
}}
@@ -83,19 +84,17 @@ export default function Relocate() {
}}
/>
<form.Field
name="lane"
name="laneID"
validators={{
// We can choose between form-wide and field-specific validators
onChange: ({ value }) =>
value.length > 2
? undefined
: "Please enter a valid running number",
value.length > 0 ? undefined : "Please enter a valid lane ID",
}}
children={(field) => {
return (
<div className="">
<Label htmlFor="runningNr" className="mb-2">
Enter lane
<Label htmlFor="laneID" className="mb-2">
Enter lane ID
</Label>
<Input
name={field.name}

View File

@@ -3,17 +3,25 @@ import Relocate from "./commands/Relocate";
import RemoveAsNonReusable from "./commands/RemoveAsNonReusable";
export default function HelperPage() {
const url: string = window.location.host.split(":")[0];
return (
<div className="flex flex-wrap m-2 justify-center">
<div className="m-1">
<Bookin />
</div>
const url: string = window.location.host.split(":")[0];
return (
<div className="flex flex-wrap m-2 justify-center">
<div className="m-1">
<div className="m-1 ">
<Bookin />
</div>
<div className="w-96 m-1">
<Relocate />
</div>
</div>
<div className="m-1">
<RemoveAsNonReusable />
</div>
<div className="m-1">{url === "localhost" && <Relocate />}</div>
</div>
);
<div className="m-1">
{url === "localhost" && (
<div className="m-1">
<RemoveAsNonReusable />
</div>
)}
</div>
</div>
);
}

View File

@@ -19,7 +19,7 @@ import { LstCard } from "../../../extendedUi/LstCard";
export default function TransferToNextLot() {
const [gaylordFilled, setGaylordFilled] = useState([0]);
const [actualAmount, setActualAmount] = useState(0);
const [tab, setTab] = useState("esitmate");
const [tab, setTab] = useState("estimate");
const [typeSwitch, setTypeSwitch] = useState(false);
const { settings } = useSettingStore();
@@ -207,7 +207,7 @@ export default function TransferToNextLot() {
<span>"EOM Transfer"</span>
<Tooltip>
<TooltipTrigger>
<Info className="h-[16px] w-[16px]" />
<Info className="h-4 w-4" />
</TooltipTrigger>
<TooltipContent>
<p>
@@ -223,7 +223,7 @@ export default function TransferToNextLot() {
<span>"Lot Transfer"</span>
<Tooltip>
<TooltipTrigger>
<Info className="h-[16px] w-[16px]" />
<Info className="h-4 w-4" />
</TooltipTrigger>
<TooltipContent>
<p>

View File

@@ -30,6 +30,7 @@ import { useSettingStore } from "../../../-lib/store/useSettings";
const printReason = [
{ key: "printerIssue", label: "Printer Related" },
{ key: "missingRfidTag", label: "Missing or incorrect tag" },
{ key: "multipleTags", label: "More than one tag on pallet." },
{ key: "rfidMissScan", label: "Missed Scan from RFID reader" },
{ key: "strapper", label: "Strapper Error" },
{ key: "manualCheck", label: "20th pallet check" },

View File

@@ -112,7 +112,7 @@ export const readerColumns: ColumnDef<Readers>[] = [
const resetReads = async () => {
setReaderReset(true);
try {
const res = await axios.post("/api/rfid/resetRatio", {
const res = await axios.post("/lst/old/api/rfid/resetRatio", {
reader: name,
});

lstDocs/package-lock.json (generated, 1129 changed lines)

File diff suppressed because it is too large

View File

@@ -15,19 +15,19 @@
"typecheck": "tsc"
},
"dependencies": {
"@docusaurus/core": "^3.9.1",
"@docusaurus/preset-classic": "^3.9.1",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
"react": "^19.0.0",
"react-dom": "^19.0.0"
"@docusaurus/core": "^3.9.2",
"@docusaurus/preset-classic": "^3.9.2",
"@mdx-js/react": "^3.1.1",
"clsx": "^2.1.1",
"prism-react-renderer": "^2.4.1",
"react": "^19.2.0",
"react-dom": "^19.2.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "^3.9.1",
"@docusaurus/tsconfig": "^3.9.1",
"@docusaurus/types": "^3.9.1",
"typescript": "~5.6.2"
"@docusaurus/module-type-aliases": "^3.9.2",
"@docusaurus/tsconfig": "^3.9.2",
"@docusaurus/types": "^3.9.2",
"typescript": "~5.9.3"
},
"browserslist": {
"production": [

View File

@@ -0,0 +1,2 @@
ALTER TABLE "invHistoricalData" ADD COLUMN "whse_id" text DEFAULT '';--> statement-breakpoint
ALTER TABLE "invHistoricalData" ADD COLUMN "whse_name" text DEFAULT 'missing whseName';

File diff suppressed because it is too large

View File

@@ -547,6 +547,13 @@
"when": 1763407463567,
"tag": "0077_lucky_texas_twister",
"breakpoints": true
},
{
"idx": 78,
"version": "7",
"when": 1766514890344,
"tag": "0078_cheerful_the_leader",
"breakpoints": true
}
]
}

View File

@@ -1,20 +1,20 @@
import { text, pgTable, timestamp, uuid, jsonb } from "drizzle-orm/pg-core";
import { jsonb, pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import { z } from "zod";
export const commandLog = pgTable(
"commandLog",
{
commandLog_id: uuid("commandLog_id").defaultRandom().primaryKey(),
commandUsed: text("commandUsed").notNull(),
bodySent: jsonb("bodySent").default([]),
reasonUsed: text("reasonUsed"),
add_at: timestamp("add_Date").defaultNow(),
},
(table) => [
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
// uniqueIndex("role_name").on(table.name),
]
"commandLog",
{
commandLog_id: uuid("commandLog_id").defaultRandom().primaryKey(),
commandUsed: text("commandUsed").notNull(),
bodySent: jsonb("bodySent").default([]),
reasonUsed: text("reasonUsed"),
addDate: timestamp("add_Date").defaultNow(),
},
(table) => [
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
// uniqueIndex("role_name").on(table.name),
],
);
// Schema for inserting a user - can be used to validate API requests

View File

@@ -1,36 +1,38 @@
import {
date,
integer,
pgTable,
text,
timestamp,
uuid,
date,
integer,
pgTable,
text,
timestamp,
uuid,
} from "drizzle-orm/pg-core";
import { createSelectSchema } from "drizzle-zod";
export const invHistoricalData = pgTable(
"invHistoricalData",
{
inv_id: uuid("inv_id").defaultRandom().primaryKey(),
histDate: date("histDate").notNull(), // this date should always be yesterday when we post it.
plantToken: text("plantToken"),
article: text("article").notNull(),
articleDescription: text("articleDescription").notNull(),
materialType: text("materialType"),
total_QTY: text("total_QTY"),
avaliable_QTY: text("avaliable_QTY"),
coa_QTY: text("coa_QTY"),
held_QTY: text("held_QTY"),
lot_Number: text("lot_number"),
consignment: text("consignment"),
location: text("location"),
upd_user: text("upd_user").default("lst"),
upd_date: timestamp("upd_date").defaultNow(),
}
// (table) => [
// // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
// uniqueIndex("role_name").on(table.name),
// ]
"invHistoricalData",
{
inv_id: uuid("inv_id").defaultRandom().primaryKey(),
histDate: date("histDate").notNull(), // this date should always be yesterday when we post it.
plantToken: text("plantToken"),
article: text("article").notNull(),
articleDescription: text("articleDescription").notNull(),
materialType: text("materialType"),
total_QTY: text("total_QTY"),
avaliable_QTY: text("avaliable_QTY"),
coa_QTY: text("coa_QTY"),
held_QTY: text("held_QTY"),
lot_Number: text("lot_number"),
consignment: text("consignment"),
location: text("location"),
whseId: text("whse_id").default(""),
whseName: text("whse_name").default("missing whseName"),
upd_user: text("upd_user").default("lst"),
upd_date: timestamp("upd_date").defaultNow(),
},
// (table) => [
// // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
// uniqueIndex("role_name").on(table.name),
// ]
);
// Schema for inserting a user - can be used to validate API requests
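A write that fills the two new columns would look roughly like the following; import paths and values are illustrative only:

import { db } from "../../database/dbclient.js"; // path assumed
import { invHistoricalData } from "../../database/schema/historicalINV.js"; // path assumed

await db.insert(invHistoricalData).values({
  histDate: "2026-02-15",
  article: "1234567",
  articleDescription: "Example article",
  whseId: "WH01",             // new column, defaults to ""
  whseName: "Main warehouse", // new column, defaults to "missing whseName"
});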

View File

@@ -10,7 +10,8 @@
"dev:dbgen": " drizzle-kit generate --config=drizzle-dev.config.ts",
"dev:dbmigrate": " drizzle-kit migrate --config=drizzle-dev.config.ts",
"build": "npm run build:server",
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y ",
"build:server": "rimraf dist && tsc --build && npm run copy:scripts && xcopy server\\services\\notifications\\utils\\views\\ dist\\server\\services\\notifications\\utils\\views\\ /E /I /Y && npm run build:copySql",
"build:copySql": "xcopy server\\services\\sqlServer\\querys\\newQueries dist\\server\\services\\sqlServer\\querys\\newQueries\\ /E /I /Y ",
"build:frontend": "cd frontend && npm run build",
"build:iisNet": "rimraf dotnetwrapper\\bin && xcopy frontend\\dist dotnetwrapper\\wwwroot /E /I /Y && cd dotnetwrapper && dotnet publish lst-wrapper.csproj --configuration Release --output ../prodBuild",
"copy:scripts": "tsx server/scripts/copyScripts.ts",

View File

@@ -1,92 +1,94 @@
import axios from "axios";
import { createLog } from "../services/logger/logger.js";
import { prodEndpointCreation } from "./createUrl.js";
import { tryCatch } from "./tryCatch.js";
import { createLog } from "../services/logger/logger.js";
type bodyData = any;
type Data = {
endpoint: string;
data: bodyData[];
endpoint: string;
data: bodyData[];
};
/**
*
* @param data
* @param timeoutDelay
* @returns
*/
export const runProdApi = async (data: Data) => {
/**
* Detachs a silo
*/
let url = await prodEndpointCreation(data.endpoint);
let url = await prodEndpointCreation(data.endpoint);
const { data: d, error } = await tryCatch(
axios.post(url, data.data[0], {
headers: {
"X-API-Key": process.env.TEC_API_KEY || "",
"Content-Type": "application/json",
},
}),
);
const { data: d, error } = await tryCatch(
axios.post(url, data.data[0], {
headers: {
"X-API-Key": process.env.TEC_API_KEY || "",
"Content-Type": "application/json",
},
})
);
let e = error as any;
if (e) {
//console.log(e.response);
if (e.status === 401) {
createLog(
"error",
"lst",
"logistics",
`Not authorized: ${JSON.stringify(e.response?.data)}`,
);
const data = {
success: false,
message: `Not authorized: ${JSON.stringify(e.response?.data)}`,
data: {
status: e.response?.status,
statusText: e.response?.statusText,
data: e.response?.data,
},
};
return data;
} else {
createLog(
"error",
"lst",
"logistics",
`There was an error processing the endpoint: ${JSON.stringify(
e.response?.data,
)}`,
);
return {
success: false,
message: `There was an error processing the endpoint: ${JSON.stringify(
e.response?.data,
)}`,
data: {
status: e.response?.status,
statusText: e.response?.statusText,
data: e.response?.data,
},
};
}
}
let e = error as any;
if (e) {
//console.log(e.response);
if (e.status === 401) {
createLog(
"error",
"lst",
"logistics",
`Not autorized: ${JSON.stringify(e.response?.data)}`
);
const data = {
success: false,
message: `Not autorized: ${JSON.stringify(e.response?.data)}`,
data: {
status: e.response?.status,
statusText: e.response?.statusText,
data: e.response?.data,
},
};
return data;
} else {
createLog(
"error",
"lst",
"logistics",
`There was an error processing the endpoint: ${JSON.stringify(
e.response?.data
)}`
);
return {
success: false,
message: `There was an error processing the endpoint: ${JSON.stringify(
e.response?.data
)}`,
data: {
status: e.response?.status,
statusText: e.response?.statusText,
data: e.response?.data,
},
};
}
}
if (d?.status !== 200) {
return {
success: false,
message: "Error processing endpoint",
data: {
status: d?.status,
statusText: d?.statusText,
data: d?.data,
},
};
} else {
return {
success: true,
message: "Endpoint was processed",
data: {
status: d.status,
statusText: d.statusText,
data: d.data,
},
};
}
if (d?.status !== 200) {
return {
success: false,
message: "Error processing endpoint",
data: {
status: d?.status,
statusText: d?.statusText,
data: d?.data,
},
};
} else {
return {
success: true,
message: "Endpoint was processed",
data: {
status: d.status,
statusText: d.statusText,
data: d.data,
},
};
}
};
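runProdApi leans on the tryCatch wrapper instead of try/catch blocks. The contract it appears to rely on is sketched below; this is an illustration, not the repo's actual helper:

// A resolved promise yields { data, error: null }; a rejection yields { data: null, error }.
type Result<T> = { data: T; error: null } | { data: null; error: unknown };

export async function tryCatch<T>(promise: Promise<T>): Promise<Result<T>> {
  try {
    const data = await promise;
    return { data, error: null };
  } catch (error) {
    return { data: null, error };
  }
}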

View File

@@ -0,0 +1,191 @@
/**
* Using this to make a scanner connection to the server.
*/
import net from "net";
interface QueuedCommand {
command: string;
resolve: (value: string) => void;
reject: (reason?: any) => void;
timeout: NodeJS.Timeout;
}
const STX = "\x02";
const ETX = "\x03";
// const prodIP = process.env.SERVER_IP as string;
// const prodPort = parseInt(process.env.SCANNER_PORT || "50000", 10);
// const scannerID = `${process.env.SCANNER_ID}@`;
//const scannerCommand = "AlplaPRODcmd00000042#000028547"; // top of the picksheet
export class ScannerClient {
private socket = new net.Socket();
private connected = false;
private queue: QueuedCommand[] = [];
private processing = false;
private incomingBuffer = "";
constructor(
private host: string,
private port: number,
private scannerId: string,
) {
this.initialize();
}
private initialize() {
if (!this.host || !this.port) {
console.log("Host or port is missing");
return;
}
this.socket.connect(this.port, this.host, () => {
console.info("Connected to scanner");
this.connected = true;
});
this.socket.on("data", (data) => this.handleData(data));
this.socket.on("close", () => {
console.log("Scanner connection closed");
this.connected = false;
});
this.socket.on("error", (err) => {
console.error("Scanner error:", err);
});
}
// ✅ Public method you use
public scan(command: string): Promise<string> {
if (!this.connected) {
return Promise.reject("Scanner not connected");
}
return new Promise((resolve, reject) => {
const timeout = setTimeout(() => {
this.processing = false;
reject("Scanner timeout");
this.processQueue();
}, 5000); // 5s safety timeout
this.queue.push({
command,
resolve,
reject,
timeout,
});
this.processQueue();
});
}
// ✅ Ensures strict FIFO processing
private processQueue() {
if (this.processing) return;
if (this.queue.length === 0) return;
this.processing = true;
const current = this.queue[0];
const message = Buffer.from(
`${STX}${this.scannerId}${current.command}${ETX}`,
"ascii",
);
this.socket.write(message);
}
// ✅ Handles full STX/ETX framed responses
private handleData(data: Buffer) {
console.log(
"ASCII:",
data
.toString("ascii")
.replace(/\x00/g, "") // remove null bytes
.replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
.trim(),
);
const current = this.queue.shift();
if (current) {
clearTimeout(current.timeout);
current.resolve(data.toString("ascii"));
}
this.processing = false;
this.processQueue();
}
}
export const scanner = new ScannerClient(
process.env.SERVER_IP!,
parseInt(process.env.SCANNER_PORT!, 10),
`${process.env.SCANNER_ID}@`,
);
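Usage sketch for the FIFO client; the command string and import path are placeholders:

import { scanner } from "./scannerClient.js"; // path assumed

export async function triggerScan(command: string): Promise<string | null> {
  try {
    // scan() frames the command with STX/ETX, queues it, and resolves with the raw
    // ASCII reply, or rejects after the 5 second safety timeout.
    return await scanner.scan(command);
  } catch (err) {
    console.error("scan failed:", err);
    return null;
  }
}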
// export const connectToScanner = () => {
// if (!process.env.SERVER_IP || !process.env.SCANNER_PORT) {
// return {
// success: false,
// message: "Missing ServerIP or ServerPort",
// };
// }
// scanner.connect(prodPort, prodIP, () => {
// console.log("Connected to scanner");
// connected = true;
// });
// };
// export const scan = async (command: string) => {
// if (!connected) {
// return {
// success: false,
// message: "Scanner is not connected, please contact admin",
// };
// }
// if (inScanCommand) {
// bufferCommands.push({ timeStamp: new Date(Date.now()), command: command });
// }
// // we are going to set to scanning
// inScanCommand = true;
// const message = Buffer.from(`${STX}${scannerID}${command}${ETX}`, "ascii");
// scanner.write(message);
// await new Promise((resolve) => setTimeout(resolve, 750));
// inScanCommand = false;
// if (bufferCommands.length > 0) {
// await scan(bufferCommands[0].command);
// bufferCommands.shift();
// }
// return {
// success: true,
// message: "Scan completed",
// };
// };
// scanner.on("data", async (data) => {
// console.log(
// "Response:",
// data
// .toString("ascii")
// .replace(/\x00/g, "") // remove null bytes
// .replace(/\x1B\[[0-9;?]*[A-Za-z]/g, "") // remove ANSI escape codes
// .trim(),
// );
// });
// scanner.on("close", () => {
// console.log("Connection closed");
// });
// scanner.on("error", (err) => {
// console.error("Scanner error:", err);
// });

View File

@@ -1,84 +1,95 @@
import { addDays, format } from "date-fns";
import { formatInTimeZone } from "date-fns-tz";
import { eq } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { settings } from "../../../../database/schema/settings.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { deliveryByDateRange } from "../../sqlServer/querys/dataMart/deleveryByDateRange.js";
import { addDays, format } from "date-fns";
export const getDeliveryByDateRange = async (data: any | null) => {
// const { data: plantToken, error: plantError } = await tryCatch(
// db.select().from(settings).where(eq(settings.name, "plantToken"))
// );
// if (plantError) {
// return {
// success: false,
// message: "Error getting Settings",
// data: plantError,
// };
// }
let deliverys: any = [];
// const { data: plantToken, error: plantError } = await tryCatch(
// db.select().from(settings).where(eq(settings.name, "plantToken"))
// );
// if (plantError) {
// return {
// success: false,
// message: "Error getting Settings",
// data: plantError,
// };
// }
let deliverys: any = [];
let updatedQuery = deliveryByDateRange;
let updatedQuery = deliveryByDateRange;
// start days can be sent over
if (data?.start) {
updatedQuery = updatedQuery.replaceAll("[startDate]", data.start[0]);
} else {
updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1");
}
// start days can be sent over
if (data?.start) {
updatedQuery = updatedQuery.replaceAll("[startDate]", data.start[0]);
} else {
updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1");
}
// end days can be sent over
if (data?.end) {
updatedQuery = updatedQuery.replaceAll("[endDate]", data.end[0]);
} else {
const defaultEndDate = format(
addDays(new Date(Date.now()), 5),
"yyyy-M-d"
);
updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
}
// end days can be sent over
if (data?.end) {
updatedQuery = updatedQuery.replaceAll("[endDate]", data.end[0]);
} else {
const defaultEndDate = format(addDays(new Date(Date.now()), 5), "yyyy-M-d");
updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
}
try {
const res: any = await query(
updatedQuery,
"Get Delivery by date range"
);
deliverys = res.data;
//console.log(res.data);
} catch (error) {
console.log(error);
return {
success: false,
message: "All Deliveries within the range.",
data: error,
};
}
try {
const res: any = await query(updatedQuery, "Get Delivery by date range");
deliverys = res.data;
//console.log(res.data);
} catch (error) {
console.log(error);
return {
success: false,
message: "All Deliveries within the range.",
data: error,
};
}
if (!data) {
deliverys = deliverys.splice(1000, 0);
}
// add plant token in
// const pOrders = deliverys.map((item: any) => {
// // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", {
// // month: "numeric",
// // day: "numeric",
// // year: "numeric",
// // hour: "2-digit",
// // minute: "2-digit",
// // hour12: false,
// // });
// if (!data) {
// deliverys = deliverys.splice(1000, 0);
// }
// add plant token in
// const pOrders = deliverys.map((item: any) => {
// // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", {
// // month: "numeric",
// // day: "numeric",
// // year: "numeric",
// // hour: "2-digit",
// // minute: "2-digit",
// // hour12: false,
// // });
// //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0];
// const dateCon = new Date(item.loadingDate).toISOString().split("T")[0];
// //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0];
// const delDate = new Date(item.deliveryDate).toISOString().split("T")[0];
// return {
// plantToken: plantToken[0].value,
// ...item,
// loadingDate: dateCon,
// deliveryDate: delDate,
// };
// });
return { success: true, message: "Current open orders", data: deliverys };
// //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0];
// const dateCon = new Date(item.loadingDate).toISOString().split("T")[0];
// //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0];
// const delDate = new Date(item.deliveryDate).toISOString().split("T")[0];
// return {
// plantToken: plantToken[0].value,
// ...item,
// loadingDate: dateCon,
// deliveryDate: delDate,
// };
// });
return {
success: true,
message: "Current open orders",
data: deliverys.map((i: any) => {
const orderDate = new Date(i.OrderDate);
const delDate = new Date(i.DeliveryDate);
const loadDate = new Date(i.LoadingDate);
return {
...i,
OrderDate: format(orderDate, "yyyy-MM-dd HH:mm"),
DeliveryDate: format(delDate, "yyyy-MM-dd HH:mm"),
LoadingDate: format(loadDate, "yyyy-MM-dd HH:mm"),
dbDate: i.DeliveryDate,
};
}),
};
};
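The same [startDate]/[endDate] defaulting shows up in several of these datamart helpers; a shared utility could remove the repetition. Hypothetical sketch, not part of the commit:

import { addDays, format } from "date-fns";

// Applies the defaults used above (1990-1-1 start, today + 5 days end) and
// substitutes the [startDate]/[endDate] tokens in a query template.
export function applyDateRange(template: string, start?: string, end?: string): string {
  const startDate = start ?? "1990-1-1";
  const endDate = end ?? format(addDays(new Date(), 5), "yyyy-M-d");
  return template
    .replaceAll("[startDate]", startDate)
    .replaceAll("[endDate]", endDate);
}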

View File

@@ -1,84 +1,97 @@
import { addDays, format } from "date-fns";
import { query } from "../../sqlServer/prodSqlServer.js";
import { deliveryByDateRangeAndAv } from "../../sqlServer/querys/dataMart/deleveryByDateRange.js";
import { addDays, format } from "date-fns";
export const getDeliveryByDateRangeAndAv = async (
avs: string,
startDate: string,
endDate: string
avs: string,
startDate: string,
endDate: string,
) => {
// const { data: plantToken, error: plantError } = await tryCatch(
// db.select().from(settings).where(eq(settings.name, "plantToken"))
// );
// if (plantError) {
// return {
// success: false,
// message: "Error getting Settings",
// data: plantError,
// };
// }
let deliverys: any = [];
// const { data: plantToken, error: plantError } = await tryCatch(
// db.select().from(settings).where(eq(settings.name, "plantToken"))
// );
// if (plantError) {
// return {
// success: false,
// message: "Error getting Settings",
// data: plantError,
// };
// }
let deliverys: any = [];
let updatedQuery = deliveryByDateRangeAndAv;
let updatedQuery = deliveryByDateRangeAndAv;
// start days can be sent over
if (startDate) {
updatedQuery = updatedQuery.replaceAll("[startDate]", startDate);
} else {
updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1");
}
// start days can be sent over
if (startDate) {
updatedQuery = updatedQuery.replaceAll("[startDate]", startDate);
} else {
updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1");
}
// end days can be sent over
if (endDate) {
updatedQuery = updatedQuery.replaceAll("[endDate]", endDate);
} else {
const defaultEndDate = format(
addDays(new Date(Date.now()), 5),
"yyyy-M-d"
);
updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
}
// end days can be sent over
if (endDate) {
updatedQuery = updatedQuery.replaceAll("[endDate]", endDate);
} else {
const defaultEndDate = format(addDays(new Date(Date.now()), 5), "yyyy-M-d");
updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
}
try {
const res: any = await query(
updatedQuery.replace("[articles]", avs),
"Get Delivery by date range"
);
deliverys = res.data;
//console.log(res.data);
} catch (error) {
console.log(error);
return {
success: false,
message: "All Deliveries within the range.",
data: error,
};
}
try {
const res: any = await query(
updatedQuery.replace("[articles]", avs),
"Get Delivery by date range",
);
deliverys = res.data;
//console.log(res.data);
} catch (error) {
console.log(error);
return {
success: false,
message: "All Deliveries within the range.",
data: error,
};
}
// if (!data) {
// deliverys = deliverys.splice(1000, 0);
// }
// add plant token in
// const pOrders = deliverys.map((item: any) => {
// // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", {
// // month: "numeric",
// // day: "numeric",
// // year: "numeric",
// // hour: "2-digit",
// // minute: "2-digit",
// // hour12: false,
// // });
// if (!data) {
// deliverys = deliverys.splice(1000, 0);
// }
// add plant token in
// const pOrders = deliverys.map((item: any) => {
// // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", {
// // month: "numeric",
// // day: "numeric",
// // year: "numeric",
// // hour: "2-digit",
// // minute: "2-digit",
// // hour12: false,
// // });
// //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0];
// const dateCon = new Date(item.loadingDate).toISOString().split("T")[0];
// //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0];
// const delDate = new Date(item.deliveryDate).toISOString().split("T")[0];
// return {
// plantToken: plantToken[0].value,
// ...item,
// loadingDate: dateCon,
// deliveryDate: delDate,
// };
// });
return { success: true, message: "Current open orders", data: deliverys };
// //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0];
// const dateCon = new Date(item.loadingDate).toISOString().split("T")[0];
// //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0];
// const delDate = new Date(item.deliveryDate).toISOString().split("T")[0];
// return {
// plantToken: plantToken[0].value,
// ...item,
// loadingDate: dateCon,
// deliveryDate: delDate,
// };
// });
return {
success: true,
message: "Current open orders",
data: deliverys.map((i: any) => {
const orderDate = new Date(i.OrderDate);
const delDate = new Date(i.DeliveryDate);
const loadDate = new Date(i.LoadingDate);
return {
...i,
OrderDate: format(orderDate, "yyyy-MM-dd HH:mm"),
DeliveryDate: format(delDate, "yyyy-MM-dd HH:mm"),
LoadingDate: format(loadDate, "yyyy-MM-dd HH:mm"),
dbDate: i.DeliveryDate,
};
}),
};
};

View File

@@ -0,0 +1,81 @@
import { addDays, format } from "date-fns";
import { query } from "../../sqlServer/prodSqlServer.js";
import { inhouseDelivery } from "../../sqlServer/querys/dataMart/inhouseDelivery.js";
export const getInhouseDeliveryByDateRange = async (data: any | null) => {
// const { data: plantToken, error: plantError } = await tryCatch(
// db.select().from(settings).where(eq(settings.name, "plantToken"))
// );
// if (plantError) {
// return {
// success: false,
// message: "Error getting Settings",
// data: plantError,
// };
// }
let deliverys: any = [];
let updatedQuery = inhouseDelivery;
// start days can be sent over
if (data?.start) {
updatedQuery = updatedQuery.replaceAll("[startDate]", data.start[0]);
} else {
updatedQuery = updatedQuery.replaceAll("[startDate]", "1990-1-1");
}
// end days can be sent over
if (data?.end) {
updatedQuery = updatedQuery.replaceAll("[endDate]", data.end[0]);
} else {
const defaultEndDate = format(addDays(new Date(Date.now()), 5), "yyyy-M-d");
updatedQuery = updatedQuery.replaceAll("[endDate]", defaultEndDate);
}
try {
const res: any = await query(
updatedQuery,
"Get inhouse Delivery by date range",
);
deliverys = res.data;
//console.log(res.data);
} catch (error) {
console.log(error);
return {
success: false,
message: "All In-House Deliveries within the range.",
data: error,
};
}
if (!data) {
deliverys = deliverys.splice(1000, 0);
}
// add plant token in
// const pOrders = deliverys.map((item: any) => {
// // const dateCon = new Date(item.loadingDate).toLocaleString("en-US", {
// // month: "numeric",
// // day: "numeric",
// // year: "numeric",
// // hour: "2-digit",
// // minute: "2-digit",
// // hour12: false,
// // });
// //const dateCon = new Date(item.loadingDate).toISOString().replace("T", " ").split(".")[0];
// const dateCon = new Date(item.loadingDate).toISOString().split("T")[0];
// //const delDate = new Date(item.deliveryDate).toISOString().replace("T", " ").split(".")[0];
// const delDate = new Date(item.deliveryDate).toISOString().split("T")[0];
// return {
// plantToken: plantToken[0].value,
// ...item,
// loadingDate: dateCon,
// deliveryDate: delDate,
// };
// });
return {
success: true,
message: "Current In-House deliveries by range",
data: deliverys,
};
};

View File

@@ -0,0 +1,52 @@
import { format } from "date-fns-tz/format";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { forecastData } from "../../sqlServer/querys/psiReport/forecast.js";
// type ArticleData = {
// id: string
// }
export const getGetPSIForecastData = async (customer: string) => {
let articles: any = [];
let queryData = forecastData;
console.log(customer);
if (customer) {
queryData = forecastData.replace("[customer]", customer);
}
const { data, error } = (await tryCatch(
query(queryData, "PSI forecast info"),
)) as any;
if (error) {
createLog(
"error",
"datamart",
"datamart",
`There was an error getting the forecast info: ${JSON.stringify(error)}`,
);
return {
success: false,
messsage: `There was an error getting the forecast info`,
data: error,
};
}
articles = data.data;
return {
success: true,
message: "PSI forecast Data",
data: articles.map((i: any) => {
const requirementDate = new Date(i.requirementDate);
return {
...i,
requirementDate: format(requirementDate, "yyyy-MM-dd"),
dbDate: i.requirementDate,
};
}),
};
};

View File

@@ -1,4 +1,4 @@
import { and, between, inArray, sql } from "drizzle-orm";
import { and, between, inArray, notInArray, sql } from "drizzle-orm";
import { db } from "../../../../database/dbclient.js";
import { invHistoricalData } from "../../../../database/schema/historicalINV.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
@@ -8,56 +8,79 @@ import { createLog } from "../../logger/logger.js";
// id: string
// }
export const psiGetInventory = async (
avs: string,
startDate: string,
endDate: string
avs: string,
startDate: string,
endDate: string,
whseToInclude: string,
exludeLanes: string
) => {
let articles: any = [];
let articles: any = [];
if (!avs) {
return {
success: false,
message: `Missing av's please send at least one over`,
data: [],
};
}
if (!avs) {
return {
success: false,
message: `Missing av's please send at least one over`,
data: [],
};
}
const ids = avs.split(",").map((id) => id.trim());
const ids = avs.split(",").map((id) => id.trim());
const whse = whseToInclude
? whseToInclude
.split(",")
.map((w) => w.trim())
.filter(Boolean)
: [];
const { data, error } = (await tryCatch(
db
.select()
.from(invHistoricalData)
.where(
and(
inArray(invHistoricalData.article, ids),
between(invHistoricalData.histDate, startDate, endDate)
)
)
//.limit(100)
)) as any;
const locations = exludeLanes
? exludeLanes.split(",").map((l) => l.trim()).filter(Boolean)
: [];
if (error) {
createLog(
"error",
"datamart",
"datamart",
`There was an error getting the planning info: ${JSON.stringify(
error
)}`
);
return {
success: false,
messsage: `There was an error getting the planning info`,
data: error,
};
}
const conditions = [
inArray(invHistoricalData.article, ids),
between(invHistoricalData.histDate, startDate, endDate),
];
articles = data;
console.log(articles.length);
return {
success: true,
message: "PSI planning Data",
data: articles,
};
// only add the warehouse condition if there are any whse values
if (whse.length > 0) {
console.log("adding whse to include in");
conditions.push(inArray(invHistoricalData.whseId, whse));
}
// locations we dont want in the system
if (locations.length > 0) {
console.log("adding excluded lanes in ",locations);
conditions.push(notInArray(invHistoricalData.location, locations));
}
const query = db
.select()
.from(invHistoricalData)
.where(and(...conditions));
// optional tryCatch or await as you had
const { data, error } = (await tryCatch(query)) as any;
if (error) {
createLog(
"error",
"datamart",
"datamart",
`There was an error getting the planning info: ${JSON.stringify(error)}`,
);
return {
success: false,
messsage: `There was an error getting the planning info`,
data: error,
};
}
articles = data;
console.log(articles.length);
return {
success: true,
message: "PSI planning Data",
data: articles,
};
};
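Example call exercising the two new filters; every value here is made up:

import { psiGetInventory } from "./psiGetInventory.js"; // path assumed

// Articles 1234567/7654321 for January, counting only warehouses WH01/WH02
// and ignoring anything sitting in the two excluded staging lanes.
const res = await psiGetInventory(
  "1234567, 7654321",
  "2026-01-01",
  "2026-01-31",
  "WH01,WH02",
  "LANE-A,LANE-B",
);
if (res.success) console.log(`rows returned: ${res.data.length}`);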

View File

@@ -2,62 +2,72 @@ import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { createLog } from "../../logger/logger.js";
import { query } from "../../sqlServer/prodSqlServer.js";
import { planningNumbersByAVDate } from "../../sqlServer/querys/psiReport/planningNumbersByAv.js";
import { improvedPsiPlanningInfo } from "./psiPlanningDataImproved.js";
// type ArticleData = {
// id: string
// }
export const psiGetPlanningData = async (
avs: string,
startDate: string,
endDate: string
avs: string,
startDate: string,
endDate: string,
) => {
let articles: any = [];
let articles: any = [];
if (!avs) {
return {
success: false,
message: `Missing av's please send at least one over`,
data: [],
};
}
if (!avs) {
return {
success: false,
message: `Missing av's please send at least one over`,
data: [],
};
}
const { data, error } = (await tryCatch(
query(
planningNumbersByAVDate
.replace("[articles]", avs)
.replace("[startDate]", startDate)
.replace("[endDate]", endDate),
"PSI planning info"
)
)) as any;
const { data, error } = (await tryCatch(
query(
planningNumbersByAVDate
.replace("[articles]", avs)
.replace("[startDate]", startDate)
.replace("[endDate]", endDate),
"PSI planning info",
),
)) as any;
if (error) {
createLog(
"error",
"datamart",
"datamart",
`There was an error getting the planning info: ${JSON.stringify(
error
)}`
);
return {
success: false,
messsage: `There was an error getting the planning info`,
data: error,
};
}
// improvedPsiPlanningInfo({
// avs,
// startDate,
// endDate,
// });
if (error) {
createLog(
"error",
"datamart",
"datamart",
`There was an error getting the planning info: ${JSON.stringify(error)}`,
);
return {
success: false,
messsage: `There was an error getting the planning info`,
data: error,
};
}
articles = data.data;
// TODO: if we are not running new planning, pass back the old structure; if we are running new planning, use the improved version below that makes sure we don't have negative numbers.
articles = data.data;
return {
success: true,
message: "PSI planning Data",
data: articles.map((n: any) => {
if (n.PalDay) {
return { ...n, PalDay: n.PalDay.toFixed(2) };
}
return {
success: true,
message: "PSI planning Data",
data: await improvedPsiPlanningInfo({
avs,
startDate,
endDate,
}),
// data: articles.map((n: any) => {
// if (n.PalDay) {
// return { ...n, PalDay: n.PalDay.toFixed(2) };
// }
return n;
}),
};
// return n;
// }),
};
};

View File

@@ -0,0 +1,171 @@
import { format } from "date-fns-tz";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { query } from "../../sqlServer/prodSqlServer.js";
const improvedQuery = `
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
SELECT
[RunningNumber] as lot
,[ProfitCentreDescription]
,[MachineDescription]
,[ArticleHumanReadableId]
,[ArticleDescription]
,[DeliveryAddressHumanReadableId]
,[DeliveryAddressDescription]
,[MouldHumanReadableId]
,[BlowheadHumanReadableId1]
,[PackagingInstructionHumanReadableId]
,[PackagingInstructionDescription]
,[MainMaterialHumanReadableId]
,[MainMaterialDescription]
,[CompoundHumanReadableId]
,[CompoundDescription]
,[ProductionLotState]
,[PlanType]
,[ProducedQuantityLoadingUnit]
,[ProducedQuantityPieces]
,[PlanStart]
,[PlanEnd]
,[ProdStart]
,[TheoreticEnd]
,[ProdDuration]
,[SetupDuration]
,[StartupDuration]
,[NetEquipmentEfficiency]
,[UtilisationDuration]
,[CycleTime]
,[Cavities]
,[FixedQuantity]
,[ProducedQuantityTrucks]
,[ProducedQuantityTradeUnit]
,[MaxRegrind]
,[Conflict]
,[ProductionOrderHumanReadableId]
,[ProductionDataImportSource]
,[Remark]
,[BlowheadDescription1]
,[MouldDescription]
,[ProcessLossPercentage]
,[SetupTypeNumberOfPersons]
,[UnplannedDowntimePercentage]
,[PlanQuantityLoadingUnit]
,[PlanQuantityPieces]
,[PlanQuantityTradeUnit]
,[PlanQuantityTrucks]
,[PublishState]
,[LastChange]
,[MaterialConsumed]
,[MaterialStaged]
,[MachineLocation]
,[HasPrioritization]
,[ArticleAlias]
FROM [test1_AlplaPROD2.0_Read].[productionScheduling].[ProductionLot] with (nolock)
where PlanEnd between @StartDate and @EndDate
and ArticleHumanReadableId in ([articles])
and PublishState = 1
order by PlanStart
`;
export const improvedPsiPlanningInfo = async (something: any) => {
const { data, error } = (await tryCatch(
query(
improvedQuery
.replace("[articles]", something.avs)
.replace("[startDate]", something.startDate)
.replace("[endDate]", something.endDate),
"PSI planning info",
),
)) as any;
// TODO: add error handling here later
return splitProduction(data.data);
};
const splitProduction = (runs: any) => {
const results: any = [];
const WORKDAY_START_HOUR = 7; // 07:00 start; later we'll get this from the shift definition
runs.forEach((e: any) => {
const {
PlanStart,
PlanEnd,
PlanQuantityPieces,
ArticleHumanReadableId,
ProdDuration,
} = e;
const prodStart: any = new Date(PlanStart);
const prodEnd: any = new Date(PlanEnd);
const prodDuration = ProdDuration
? ProdDuration * 60 * 60 * 1000
: prodEnd - prodStart;
// get the prod date the production falls under
function getProdDayStart(date: Date) {
const d = new Date(date);
d.setHours(WORKDAY_START_HOUR, 0, 0, 0);
if (date.getHours() < WORKDAY_START_HOUR) {
// before 07:00, belongs to previous calendar day
d.setDate(d.getDate() - 1);
}
return d;
}
// current pointer starts at the work-day start that contains our start time
let currentStart = new Date(prodStart);
let prodDayStart = getProdDayStart(currentStart);
while (prodDayStart < prodEnd) {
// 1) The next day's start = prodDayStart + 1 day at 07:00
const nextProdDayStart = new Date(prodDayStart);
nextProdDayStart.setDate(nextProdDayStart.getDate() + 1);
// 2) Segment end is either the next work-day start or the actual end, whichever is sooner
const segmentEnd = new Date(
Math.min(nextProdDayStart.getTime(), prodEnd.getTime()),
);
// 3) Determine the overlap window within (startTime..endTime)
const segStart: any = new Date(
Math.max(prodDayStart.getTime(), prodStart.getTime()),
);
const segEnd: any = segmentEnd;
if (segEnd > segStart) {
const segMs = segEnd - segStart;
const proportion = segMs / prodDuration;
const qty = PlanQuantityPieces * proportion;
const pal = e.PlanQuantityLoadingUnit * proportion;
results.push({
Article: ArticleHumanReadableId,
Description: e.ArticleAlias,
MachineId: e.MachineLocation,
MachineName: e.MachineDescription,
LotNumber: e.lot,
ProductionDay: format(prodDayStart, "M/d/yyyy"),
TotalPlanned: e.PlanQuantityPieces,
// PlanEnd,
// TheoreticEnd,
QTYPerDay: parseInt(qty.toFixed(0)),
PalDay: parseFloat(pal.toFixed(2)),
finished: e.ProductionLotState === 3 ? 1 : 0,
cavities: e.Cavities,
//prodDuration,
});
}
// move to next production-day window
prodDayStart = nextProdDayStart;
}
});
return results;
};
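Note: to make the proration concrete, a lot planned from 1/6 22:00 to 1/7 10:00 (12 hours, 24,000 pieces) crosses the 07:00 boundary once, so splitProduction credits 9 hours of quantity to production day 1/6 and 3 hours to 1/7. A standalone check of that math with hypothetical numbers (not taken from the repo):

// Standalone illustration of the 07:00-to-07:00 proration used above.
const planStart = new Date("2025-01-06T22:00:00");
const planEnd = new Date("2025-01-07T10:00:00");
const planQuantityPieces = 24_000;
const durationMs = planEnd.getTime() - planStart.getTime(); // 12 h

const boundary = new Date("2025-01-07T07:00:00"); // next 07:00 work-day start

const firstSegMs = boundary.getTime() - planStart.getTime(); // 9 h
const secondSegMs = planEnd.getTime() - boundary.getTime(); // 3 h

console.log(Math.round(planQuantityPieces * (firstSegMs / durationMs))); // 18000 -> ProductionDay 1/6
console.log(Math.round(planQuantityPieces * (secondSegMs / durationMs))); // 6000 -> ProductionDay 1/7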

View File

@@ -1,43 +1,48 @@
import { OpenAPIHono } from "@hono/zod-openapi";
import activequerys from "./route/getCurrentQuerys.js";
import getArticles from "./route/getActiveArticles.js";
import currentInv from "./route/getInventory.js";
import getCustomerInv from "./route/getCustomerInv.js";
import getOpenOrders from "./route/getOpenOrders.js";
import getDeliveryByDate from "./route/getDeliveryDateByRange.js";
import fakeEDI from "./route/fakeEDI.js";
import getArticles from "./route/getActiveArticles.js";
import addressCorrections from "./route/getCityStateData.js";
import activequerys from "./route/getCurrentQuerys.js";
import getCustomerInv from "./route/getCustomerInv.js";
import getDeliveryByDate from "./route/getDeliveryDateByRange.js";
import getDeliveryByDateRangeAndAv from "./route/getDeliveryDateByRangeAndAv.js";
import fifoIndex from "./route/getFifoIndex.js";
import financeAudit from "./route/getFinanceAudit.js";
import getForecastByAv from "./route/getForecastDataByAv.js";
import getInhouseDeliveryByDate from "./route/getInHouseDeliveryDateByRange.js";
import currentInv from "./route/getInventory.js";
import getOpenOrders from "./route/getOpenOrders.js";
import psiArticleData from "./route/getPsiArticleData.js";
import psiForecastData from "./route/getPsiForecast.js";
import psiInventory from "./route/getPsiinventory.js";
import psiPlanningData from "./route/getPsiPlanningData.js";
import psiProductionData from "./route/getPsiProductionData.js";
import psiInventory from "./route/getPsiinventory.js";
import getForecastByAv from "./route/getForecastDataByAv.js";
import getDeliveryByDateRangeAndAv from "./route/getDeliveryDateByRangeAndAv.js";
const app = new OpenAPIHono();
const routes = [
activequerys,
getArticles,
currentInv,
getCustomerInv,
getOpenOrders,
getDeliveryByDate,
getDeliveryByDateRangeAndAv,
getForecastByAv,
fakeEDI,
addressCorrections,
fifoIndex,
financeAudit,
psiArticleData,
psiPlanningData,
psiProductionData,
psiInventory,
activequerys,
getArticles,
currentInv,
getCustomerInv,
getOpenOrders,
getDeliveryByDate,
getInhouseDeliveryByDate,
getDeliveryByDateRangeAndAv,
getForecastByAv,
fakeEDI,
addressCorrections,
fifoIndex,
financeAudit,
psiArticleData,
psiPlanningData,
psiProductionData,
psiInventory,
psiForecastData,
] as const;
const appRoutes = routes.forEach((route) => {
app.route("/datamart", route);
app.route("/datamart", route);
});
export default app;

View File

@@ -1,146 +1,153 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
const app = new OpenAPIHono({ strict: false });
const current: any = [
{
name: "getActiveAv",
endpoint: "/api/datamart/getarticles",
description: "Gets all current active AV, with specific critiera.",
},
// {
// name: "getStockLaneDims",
// endpoint: "/api/v1/masterData/getStockDims",
// description: "Returns the lane dims along with a column to send actaul dims to be updated.",
// },
// {
// name: "getAddressInfo",
// endpoint: "/api/v1/masterData/getAddressInfo",
// description: "Returns current active addresses with street and zip",
// },
// {
// name: "getMissingPkgData",
// endpoint: "/api/v1/masterData/getMissingPKGData",
// description: "Returns all packaging data that is missing either printer, layout, or carton layout",
// },
{
name: "getCustomerInventory",
endpoint: "/api/datamart/getcustomerinventory",
description:
"Returns specific customer inventory based on there address ID, with optional to include warehouses, IE 36,41,5. leaving warehouse blank will just pull everything",
criteria: "customer,whseToInclude",
},
// {
// name: "getPalletLabels",
// endpoint: "/api/v1/masterData/getPalletLabels",
// description: "Returns specific amount of pallets RN, Needs label number and printer, Specfic to Dayton.",
// criteria: "runningNumber,printerName,count",
// },
{
name: "getopenorders",
endpoint: "/api/datamart/getopenorders",
description:
"Returns open orders based on day count sent over, sDay 15 days in the past eDay 5 days in the future, can be left empty for this default days",
criteria: "sDay,eDay",
},
// {
// name: "getOpenIncoming",
// endpoint: "/api/v1/masterData/getOpenIncoming",
// description:
// "Returns open orders based on day count sent over, sDay 15 days in the past eDay 5 days in the future, can be left empty for this default days",
// criteria: "sDay,eDay",
// },
// {
// name: "planningCheckPkg",
// endpoint: "/api/v1/masterData/planningPkgCheck",
// description: "Returns all lots starting later than today and has a pkg that is missing layouts.",
// },
{
name: "getinventory",
endpoint: "/api/datamart/getinventory",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all inventory, excludes inv locations. no running numbers",
criteria: "includeRunnningNumbers", // uncomment this out once the improt process can be faster
},
// {
// name: "getOpenOrderUpdates",
// endpoint: "/api/v1/masterData/getOpenOrderUpdates",
// // description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
// description: "Returns all orders based on customer id, leaving empty will pull everythinng in.",
// criteria: "customer", // uncomment this out once the improt process can be faster
// },
{
name: "getSiloAdjustment",
endpoint: "/api/logistics/getsilosdjustment",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all siloadjustments in selected date range IE: 1/1/2025 to 1/31/2025",
criteria: "startDate,endDate", // uncomment this out once the improt process can be faster
},
{
name: "Delivery by date trange",
endpoint: "/api/datamart/deliverybydaterange",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all Deliverys in selected date range IE: 1/1/2025 to 1/31/2025",
criteria: "start,end", // uncomment this out once the improt process can be faster
},
{
name: "Fake Edi Update",
endpoint: "/api/datamart/fakeediupdate",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all open orders to correct and resubmit, leaving blank will get everything putting an address only returns the specified address",
criteria: "address", // uncomment this out once the improt process can be faster
},
{
name: "Address Corrections",
endpoint: "/api/datamart/getaddressdata",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all addresses that will not process correctly in tms due to incorrect city state setup.",
//criteria: "address", // uncomment this out once the improt process can be faster
},
{
name: "Fifo index",
endpoint: "/api/datamart/getfifoindex",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns fifo index for all pallets shipped within the last 90 days.",
//criteria: "address", // uncomment this out once the improt process can be faster
},
{
name: "Finance Audit inv",
endpoint: "/api/datamart/getfinanceaudit",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all inventory past the date provided, ie: 5/31/2025",
criteria: "date", // uncomment this out once the improt process can be faster
},
{
name: "getActiveAv",
endpoint: "/api/datamart/getarticles",
description: "Gets all current active AV, with specific critiera.",
},
// {
// name: "getStockLaneDims",
// endpoint: "/api/v1/masterData/getStockDims",
// description: "Returns the lane dims along with a column to send actaul dims to be updated.",
// },
// {
// name: "getAddressInfo",
// endpoint: "/api/v1/masterData/getAddressInfo",
// description: "Returns current active addresses with street and zip",
// },
// {
// name: "getMissingPkgData",
// endpoint: "/api/v1/masterData/getMissingPKGData",
// description: "Returns all packaging data that is missing either printer, layout, or carton layout",
// },
{
name: "getCustomerInventory",
endpoint: "/api/datamart/getcustomerinventory",
description:
"Returns specific customer inventory based on there address ID, with optional to include warehouses, IE 36,41,5. leaving warehouse blank will just pull everything",
criteria: "customer,whseToInclude",
},
// {
// name: "getPalletLabels",
// endpoint: "/api/v1/masterData/getPalletLabels",
// description: "Returns specific amount of pallets RN, Needs label number and printer, Specfic to Dayton.",
// criteria: "runningNumber,printerName,count",
// },
{
name: "getopenorders",
endpoint: "/api/datamart/getopenorders",
description:
"Returns open orders based on day count sent over, sDay 15 days in the past eDay 5 days in the future, can be left empty for this default days",
criteria: "sDay,eDay",
},
// {
// name: "getOpenIncoming",
// endpoint: "/api/v1/masterData/getOpenIncoming",
// description:
// "Returns open orders based on day count sent over, sDay 15 days in the past eDay 5 days in the future, can be left empty for this default days",
// criteria: "sDay,eDay",
// },
// {
// name: "planningCheckPkg",
// endpoint: "/api/v1/masterData/planningPkgCheck",
// description: "Returns all lots starting later than today and has a pkg that is missing layouts.",
// },
{
name: "getinventory",
endpoint: "/api/datamart/getinventory",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all inventory, excludes inv locations. no running numbers",
criteria: "includeRunnningNumbers", // uncomment this out once the improt process can be faster
},
// {
// name: "getOpenOrderUpdates",
// endpoint: "/api/v1/masterData/getOpenOrderUpdates",
// // description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
// description: "Returns all orders based on customer id, leaving empty will pull everythinng in.",
// criteria: "customer", // uncomment this out once the improt process can be faster
// },
{
name: "getSiloAdjustment",
endpoint: "/api/logistics/getsilosdjustment",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all siloadjustments in selected date range IE: 1/1/2025 to 1/31/2025",
criteria: "startDate,endDate", // uncomment this out once the improt process can be faster
},
{
name: "Delivery by date range",
endpoint: "/api/datamart/deliverybydaterange",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all Deliverys in selected date range IE: 1/1/2025 to 1/31/2025",
criteria: "start,end", // uncomment this out once the improt process can be faster
},
{
name: "In House Delivery by date range",
endpoint: "/api/datamart/inhousedeliverybydaterange",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all in-house deliveries in selected date range IE: 1/1/2025 to 1/31/2025",
criteria: "start,end", // uncomment this out once the improt process can be faster
},
{
name: "Fake Edi Update",
endpoint: "/api/datamart/fakeediupdate",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all open orders to correct and resubmit, leaving blank will get everything putting an address only returns the specified address",
criteria: "address", // uncomment this out once the improt process can be faster
},
{
name: "Address Corrections",
endpoint: "/api/datamart/getaddressdata",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns all addresses that will not process correctly in tms due to incorrect city state setup.",
//criteria: "address", // uncomment this out once the improt process can be faster
},
{
name: "Fifo index",
endpoint: "/api/datamart/getfifoindex",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description:
"Returns fifo index for all pallets shipped within the last 90 days.",
//criteria: "address", // uncomment this out once the improt process can be faster
},
{
name: "Finance Audit inv",
endpoint: "/api/datamart/getfinanceaudit",
// description: "Returns all inventory, by default excludes running numebrs, also excludes inv locations.",
description: "Returns all inventory past the date provided, ie: 5/31/2025",
criteria: "date", // uncomment this out once the improt process can be faster
},
];
app.openapi(
createRoute({
tags: ["dataMart"],
summary: "Returns all avalible querys.",
method: "get",
path: "/getavalibleaquerys",
createRoute({
tags: ["dataMart"],
summary: "Returns all avalible querys.",
method: "get",
path: "/getavalibleaquerys",
responses: responses(),
}),
async (c) => {
//const body = await c.req.json();
// make sure we have a valid user that is really logged in
apiHit(c, { endpoint: "/getavalibleaquerys" });
responses: responses(),
}),
async (c) => {
//const body = await c.req.json();
// make sure we have a valid user that is really logged in
apiHit(c, { endpoint: "/getavalibleaquerys" });
return c.json({
success: true,
message: "All Current Active Querys.",
sheetVersion: 2.8,
data: current,
});
}
return c.json({
success: true,
message: "All Current Active Querys.",
sheetVersion: 2.8, // TODO: when this gets switched change this
data: current,
});
},
);
export default app;
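Note: a consumer (for example the spreadsheet side that reads sheetVersion) would typically call this endpoint, compare the version, and list the available queries. A minimal client sketch; the base URL is a placeholder, not a value from the repo:

// Minimal client-side sketch for /getavalibleaquerys.
const BASE_URL = "http://localhost:3000"; // placeholder base URL

const res = await fetch(`${BASE_URL}/api/datamart/getavalibleaquerys`);
const body = await res.json();

if (body.sheetVersion > 2.8) {
  console.warn("A newer sheet version is available, please update.");
}
for (const q of body.data) {
  console.log(`${q.name} -> ${q.endpoint} (${q.criteria ?? "no criteria"})`);
}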

View File

@@ -0,0 +1,54 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { getInhouseDeliveryByDateRange } from "../controller/getInhouseDeliveryByDateRange.js";
const app = new OpenAPIHono({ strict: false });
const Body = z.object({
includeRunnningNumbers: z.string().openapi({ example: "x" }),
});
app.openapi(
createRoute({
tags: ["dataMart"],
summary: "Returns deliveries by date range.",
method: "get",
path: "/inhousedeliverybydaterange",
request: {
body: {
content: {
"application/json": { schema: Body },
},
},
},
responses: responses(),
}),
async (c) => {
const delivery: any = c.req.queries();
// make sure we have a valid user that is really logged in
apiHit(c, { endpoint: "/inhousedeliverybydaterange" });
const { data, error } = await tryCatch(
getInhouseDeliveryByDateRange(delivery ? delivery : null),
);
if (error) {
console.log(error);
return c.json(
{
success: false,
message: "There was an error getting the deliveries.",
data: error,
},
400,
);
}
return c.json({
success: data.success,
message: data.message,
data: data.data,
});
},
);
export default app;
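Note: the handler reads start and end from the query string via c.req.queries(), so callers pass them as URL parameters. A usage sketch with example dates and a placeholder base URL:

// Sketch: calling the in-house delivery endpoint with a date range.
const params = new URLSearchParams({ start: "1/1/2025", end: "1/31/2025" });
const res = await fetch(
  `http://localhost:3000/api/datamart/inhousedeliverybydaterange?${params}`, // base URL is a placeholder
);
const { success, message, data } = await res.json();
console.log(success, message, Array.isArray(data) ? data.length : data);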

View File

@@ -0,0 +1,65 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { getGetPSIForecastData } from "../controller/psiForecastData.js";
const app = new OpenAPIHono({ strict: false });
const Body = z.object({
includeRunnningNumbers: z.string().openapi({ example: "x" }),
});
app.openapi(
createRoute({
tags: ["dataMart"],
summary: "Returns the psiforecastdata.",
method: "get",
path: "/psiforecastdata",
request: {
body: {
content: {
"application/json": { schema: Body },
},
},
},
responses: responses(),
}),
async (c) => {
const customer: any = c.req.queries();
// make sure we have a valid user that is really logged in
apiHit(c, { endpoint: "/psiforecastdata" });
//console.log(articles["avs"][0]);
let customeArticle = null;
if (customer) {
customeArticle = customer["customer"][0];
}
const { data, error } = await tryCatch(
getGetPSIForecastData(customeArticle),
);
if (error) {
console.log(error);
return c.json(
{
success: false,
message: "There was an error getting the articles.",
data: error,
},
400,
);
}
//console.log(data);
return c.json(
{
success: data.success,
message: data.message,
data: data.data,
},
data.success ? 200 : 400,
);
},
);
export default app;

View File

@@ -1,64 +1,66 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { responses } from "../../../globalUtils/routeDefs/responses.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { psiGetInventory } from "../controller/psiGetInventory.js";
const app = new OpenAPIHono({ strict: false });
const Body = z.object({
includeRunnningNumbers: z.string().openapi({ example: "x" }),
includeRunnningNumbers: z.string().openapi({ example: "x" }),
});
app.openapi(
createRoute({
tags: ["dataMart"],
summary: "Returns the getPsiinventory.",
method: "get",
path: "/getpsiinventory",
request: {
body: {
content: {
"application/json": { schema: Body },
},
},
},
responses: responses(),
}),
async (c) => {
const q: any = c.req.queries();
createRoute({
tags: ["dataMart"],
summary: "Returns the getPsiinventory.",
method: "get",
path: "/getpsiinventory",
request: {
body: {
content: {
"application/json": { schema: Body },
},
},
},
responses: responses(),
}),
async (c) => {
const q: any = c.req.queries();
// make sure we have a valid user that is really logged in
apiHit(c, { endpoint: "/getpsiinventory" });
//console.log(articles["avs"][0]);
const { data, error } = await tryCatch(
psiGetInventory(
q["avs"] ? q["avs"][0] : null,
q["startDate"] ? q["startDate"][0] : null,
q["endDate"] ? q["endDate"][0] : null
)
);
// make sure we have a valid user that is really logged in
apiHit(c, { endpoint: "/getpsiinventory" });
//console.log(articles["avs"][0]);
const { data, error } = await tryCatch(
psiGetInventory(
q["avs"] ? q["avs"][0] : null,
q["startDate"] ? q["startDate"][0] : null,
q["endDate"] ? q["endDate"][0] : null,
q["whseToInclude"] ? q["whseToInclude"][0] : null,
q["exludeLanes"] ? q["exludeLanes"][0] : null,
),
);
if (error) {
console.log(error);
return c.json(
{
success: false,
message: "There was an error getting the production.",
data: error,
},
400
);
}
if (error) {
console.log(error);
return c.json(
{
success: false,
message: "There was an error getting the production.",
data: error,
},
400,
);
}
//console.log(data);
//console.log(data);
return c.json(
{
success: data.success,
message: data.message,
data: data.data,
},
data.success ? 200 : 400
);
}
return c.json(
{
success: data.success,
message: data.message,
data: data.data,
},
data.success ? 200 : 400,
);
},
);
export default app;
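Note: Hono's c.req.queries() returns every parameter as a string array, which is why each value is read as q["avs"][0] and so on. A small hypothetical helper (first is illustrative, not in the repo) would tidy the repeated ternaries:

// Hypothetical helper for the q["x"] ? q["x"][0] : null pattern above.
type Queries = Record<string, string[] | undefined>;

const first = (q: Queries, key: string): string | null => q[key]?.[0] ?? null;

// equivalent call, under that assumption:
// psiGetInventory(
//   first(q, "avs"),
//   first(q, "startDate"),
//   first(q, "endDate"),
//   first(q, "whseToInclude"),
//   first(q, "exludeLanes"),
// );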

View File

@@ -2,65 +2,71 @@ import { OpenAPIHono } from "@hono/zod-openapi";
const app = new OpenAPIHono();
import stats from "./route/stats.js";
import history from "./route/invHistory.js";
import { createJob } from "../notifications/utils/processNotifications.js";
import { historicalInvIMmport } from "./utils/historicalInv.js";
import { tryCatch } from "../../globalUtils/tryCatch.js";
import { createLog } from "../logger/logger.js";
import { createJob } from "../notifications/utils/processNotifications.js";
import { query } from "../sqlServer/prodSqlServer.js";
import { shiftChange } from "../sqlServer/querys/misc/shiftChange.js";
import { createLog } from "../logger/logger.js";
import gpData from "./route/getGpData.js";
import lastPurch from "./route/getLastPurchPrice.js";
import lastSales from "./route/getLastSalesPrice.js";
import gpData from "./route/getGpData.js";
import consumptionData from "./route/getProductionConsumption.js";
import purchased from "./route/getPurchased.js";
import regrind from "./route/getregrind.js";
import soldItems from "./route/getSoldItems.js";
import purchased from "./route/getPurchased.js";
import history from "./route/invHistory.js";
import stats from "./route/stats.js";
import { historicalInvIMmport } from "./utils/historicalInv.js";
const routes = [
stats,
history,
lastPurch,
lastSales,
gpData,
consumptionData,
regrind,
soldItems,
purchased,
stats,
history,
lastPurch,
lastSales,
gpData,
consumptionData,
regrind,
soldItems,
purchased,
] as const;
const appRoutes = routes.forEach((route) => {
app.route("/eom", route);
app.route("/eom", route);
});
setTimeout(async () => {
const { data: shift, error: shiftError } = (await tryCatch(
query(shiftChange, "shift change from material.")
)) as any;
const { data: shift, error: shiftError } = (await tryCatch(
query(shiftChange, "shift change from material."),
)) as any;
if (shiftError) {
createLog(
"error",
"eom",
"eom",
"There was an error getting the shift times will use fallback times"
);
}
if (shiftError) {
createLog(
"error",
"eom",
"eom",
"There was an error getting the shift times will use fallback times",
);
}
// shift split
const shiftTimeSplit = shift?.data[0]?.shiftChange.split(":");
// shift split
const shiftTimeSplit = shift?.data[0]?.shiftChange.split(":");
const cronSetup = `${
shiftTimeSplit?.length > 0 ? `${parseInt(shiftTimeSplit[1])}` : "0"
} ${
shiftTimeSplit?.length > 0 ? `${parseInt(shiftTimeSplit[0])}` : "7"
} * * *`;
const cronSetup = `${
shiftTimeSplit?.length > 0 ? `${parseInt(shiftTimeSplit[1])}` : "0"
} ${
shiftTimeSplit?.length > 0 ? `${parseInt(shiftTimeSplit[0])}` : "7"
} * * *`;
//console.log(cronSetup);
createJob("eom_historical_inv", cronSetup, historicalInvIMmport);
//console.log(cronSetup);
createJob("eom_historical_inv", cronSetup, historicalInvIMmport);
}, 5 * 1000);
// the time we want to run the historical import should be the same time the historical data runs on the server
// getting this from the shift time
//if (process.env.NODE_ENV?.trim() !== "production") {
setTimeout(() => {
historicalInvIMmport();
}, 15 * 1000);
//}
export default app;
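Note: the cron expression is built minute-first from the shiftChange time, so a shift change of "07:30" becomes "30 7 * * *", and the fallback when the query fails is "0 7 * * *" (07:00). A standalone sketch of the same construction:

// Standalone sketch of the cron construction above: "HH:mm" -> "m H * * *".
const buildDailyCron = (shiftChange?: string): string => {
  const parts = shiftChange?.split(":") ?? [];
  const minute = parts.length > 0 ? `${parseInt(parts[1], 10)}` : "0";
  const hour = parts.length > 0 ? `${parseInt(parts[0], 10)}` : "7";
  return `${minute} ${hour} * * *`;
};

console.log(buildDailyCron("07:30")); // "30 7 * * *"
console.log(buildDailyCron()); // "0 7 * * *" (fallback)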

View File

@@ -76,7 +76,10 @@ export const historicalInvIMmport = async () => {
coa_QTY: i.COA_QTY,
held_QTY: i.Held_QTY,
consignment: i.Consigment,
lot_Number: i.lot,
lot_Number: i.Lot,
location: i.location,
whseId: i.warehouseID,
whseName: i.warehouseName,
};
});

View File

@@ -0,0 +1,155 @@
import axios from "axios";
import net from "net";
import { db } from "../../../../../database/dbclient.js";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { scanner } from "../../../../globalUtils/scannerConnect.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js";
type Data = {
runningNr: number;
reason: string;
user: string;
};
export const bookOutPallet = async (data: Data) => {
const { runningNr, reason, user } = data;
if (!reason || reason.length < 4) {
return {
success: false,
status: 400,
message: "The reason provided is to short",
data: [],
};
}
const queryCheck = sqlQuerySelector("inventoryInfo.query");
if (!queryCheck.success) {
return {
success: false,
status: 400,
message: queryCheck.message,
data: data,
};
}
const { data: label, error: labelError } = (await tryCatch(
query(
queryCheck.query!.replace("[runningNr]", `${runningNr}`),
"labelQuery",
),
)) as any;
if (labelError) {
return {
success: false,
status: 400,
message: labelError.message,
data: labelError,
};
}
// check if we are in ppoo
if (label.data.length <= 0) {
return {
success: false,
status: 400,
message: `${runningNr} is not currently in ppoo, please move to ppoo before trying to book-out`,
data: [],
};
}
// check if the label is blocked for coa.
if (
label.data[0].blockingReason &&
!label.data[0].blockingReason?.includes("COA")
) {
return {
success: false,
status: 400,
message: `${runningNr} is blocked for something other than COA; to get this pallet booked out, please take the label to quality to be released, then you can book it out.`,
data: [],
};
}
if (label.data[0].blockingReason) {
await scanner.scan("AlplaPRODcmd89");
await scanner.scan(`${label.data[0].barcode}`);
}
// create the url to post
const url = await prodEndpointCreation(
"/public/v1.1/Manufacturing/ProductionControlling/BookOut",
);
const SSCC = await createSSCC(runningNr);
const bookOutData = {
sscc: SSCC.slice(2),
scannerId: "666",
};
try {
const results = await axios.post(url, bookOutData, {
headers: {
"X-API-Key": process.env.TEC_API_KEY || "",
"Content-Type": "application/json",
},
});
if (results.data.Errors) {
return {
success: false,
status: 400,
message: results.data.Errors.Error.Description,
};
}
// if (results.data.Result !== 0) {
// console.log("stopping here and closing to soon", results);
// return {
// success: false,
// status: 400,
// message: results.data.Message,
// };
// }
const { data: commandL, error: ce } = await tryCatch(
db.insert(commandLog).values({
commandUsed: "book out",
bodySent: data,
reasonUsed: reason,
}),
);
return {
success: true,
message: `${runningNr} was booked out`,
status: results.status,
};
} catch (error: any) {
console.log(bookOutData);
return {
success: false,
status: 400,
message: error.response?.data,
data: error.response?.data,
};
}
// });
/**
* book out the label with
* url /public/v1.1/Manufacturing/ProductionControlling/BookOut
* {
* "sscc": "string",
* "scannerId": "string"
* }
*/
//---------------------------------------------------------------------------------------\\
};
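Note: a route handler would call this controller with the running number, a reason of at least four characters, and the user. A minimal usage sketch with example values:

// Example call into bookOutPallet; the reason must be at least 4 characters.
const result = await bookOutPallet({
  runningNr: 123456, // example running number
  reason: "damaged pallet",
  user: "jdoe",
});

if (!result.success) {
  console.error(result.status, result.message);
} else {
  console.log(result.message); // "123456 was booked out"
}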

View File

@@ -0,0 +1,96 @@
import axios from "axios";
import { db } from "../../../../../database/dbclient.js";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
type Data = {
runningNr: number;
laneID: number;
};
export const relatePallet = async (data: Data) => {
const { runningNr, laneID } = data;
// replace the rn
// console.log(data);
// create the url to post
// do we have warehousing turned on?
const { data: feature, error: featureError } = (await tryCatch(
query(
`SELECT [Id]
,[Feature]
,[Enabled]
,[ActivationDate]
FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
"feature switch check",
),
)) as any;
let prodUrl = "/public/v1.0/Warehousing/Relocate";
if (featureError) {
prodUrl = "/public/v1.0/Warehousing/Relocate";
}
if (feature?.data.length > 0) {
prodUrl = "/public/v1.1/Warehousing/Unit/Relocate";
}
// 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel","
// 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"
let url = await prodEndpointCreation(prodUrl);
const SSCC = await createSSCC(runningNr);
const consumeSomething = {
ScannerId: 999,
laneId: laneID,
sscc: SSCC.slice(2),
};
console.log(consumeSomething);
try {
const results = await axios.post(url, consumeSomething, {
headers: {
"X-API-Key": process.env.TEC_API_KEY || "",
"Content-Type": "application/json",
},
});
if (results.data.Errors) {
return {
success: false,
message: results.data.Errors.Error.Description,
};
}
if (results.data.Result !== 0 || results.data.data.length <= 0) {
return {
success: false,
message: results.data.Message,
};
}
const { data: commandL, error: ce } = await tryCatch(
db.insert(commandLog).values({
commandUsed: "relocate",
bodySent: data,
}),
);
return {
success: true,
message: "Pallet Was Relocated",
status: results.status,
};
} catch (error: any) {
console.log(error);
return {
success: false,
status: 200,
message: error.response?.data.errors[0].message,
};
}
};

View File

@@ -1,120 +1,50 @@
import axios from "axios";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { lstAuth } from "../../../../index.js";
import { createSSCC } from "../../../../globalUtils/createSSCC.js";
import { db } from "../../../../../database/dbclient.js";
import net from "net";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { scanner } from "../../../../globalUtils/scannerConnect.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { labelInfo } from "../../../sqlServer/querys/warehouse/labelInfo.js";
import { settings } from "../../../../../database/schema/settings.js";
import { eq } from "drizzle-orm";
import { serverData } from "../../../../../database/schema/serverData.js";
export const removeAsNonReusable = async (data: any) => {
// const removalUrl = await prodEndpointCreation(
// "/public/v1.0/Warehousing/RemoveAsNonReusableMaterial"
// );
// get the label info
const { data: label, error: labelError } = (await tryCatch(
query(labelInfo.replaceAll("[runningNr]", data.runningNr), "Label Info"),
)) as any;
// const sscc = await createSSCC(data.runningNr);
if (label.data[0].stockStatus === "notOnStock") {
return {
success: false,
message: `The label: ${data.runningNr} is not currently in stock`,
data: [],
};
}
// const { data: remove, error } = await tryCatch(
// axios.post(
// removalUrl,
// { scannerId: "500", sscc: sscc.slice(2) },
// {
// headers: { Authorization: `Basic ${lstAuth}` },
// }
// )
// );
if (label.data[0].blockingReason) {
return {
success: false,
status: 400,
message: `${data.runningNr} is currently blocked; to get this pallet removed, please take the label to quality to be released, then you can remove it.`,
data: [],
};
}
// use a scanner tcp connection to trigger this process
const STX = "\x02";
const ETX = "\x03";
const scanner = new net.Socket();
let stage = 0;
// get the label info
const { data: label, error: labelError } = (await tryCatch(
query(labelInfo.replaceAll("[runningNr]", data.runningNr), "Label Info")
)) as any;
await scanner.scan("AlplaPRODcmd23");
await scanner.scan(`${label.data[0].barcode}`);
if (label.data[0].stockStatus === "notOnStock") {
return {
success: false,
message: `The label: ${data.runningNr} is not currently in stock`,
data: [],
};
}
let reason = data.reason || "";
delete data.reason;
// get the server ip based on the token.
const setting = await db.select().from(settings);
const { data: commandL, error: ce } = await tryCatch(
db.insert(commandLog).values({
commandUsed: "removeAsNonReusable",
bodySent: data,
reasonUsed: reason,
}),
);
const plantInfo = await db.select().from(serverData);
const plantToken = setting.filter((n: any) => n.name === "plantToken");
const scannerID = setting.filter((n: any) => n.name === "scannerID");
const scannerPort = setting.filter((n: any) => n.name === "scannerPort");
const plantData = plantInfo.filter(
(p: any) => p.plantToken === plantToken[0].value
);
scanner.connect(
parseInt(scannerPort[0].value),
plantData[0].idAddress!,
async () => {
// need to get the ip from the server data and scanner port
//console.log(`connected to scanner`);
scanner.write(`${STX}${scannerID[0].value}@AlplaPRODcmd23${ETX}`);
}
);
scanner.on("data", (data) => {
const response = data.toString();
//console.log("Received:", response.trimStart());
if (stage === 0) {
stage = 1;
scanner.write(
`${STX}${scannerID[0].value}@${label.data[0].Barcode}${ETX}`
);
} else if (stage === 1) {
scanner.end();
}
});
scanner.on("close", () => {
//console.log("Connection closed");
scanner.destroy();
});
scanner.on("error", (err) => {
//console.error("Scanner error:", err);
scanner.destroy();
return {
success: false,
message: `The label: ${data.runningNr} encountered an error while being removed, please try again`,
data: [],
};
});
// if (error) {
// //console.log(error);
// return {
// success: false,
// message: `There was an error removing ${data.runningNr}`,
// data: [],
// };
// }
let reason = data.reason || "";
delete data.reason;
const { data: commandL, error: ce } = await tryCatch(
db.insert(commandLog).values({
commandUsed: "removeAsNonReusable",
bodySent: data,
reasonUsed: reason,
})
);
return {
success: true,
message: `The label: ${data.runningNr}, was removed`,
data: [],
};
return {
success: true,
message: `The label: ${data.runningNr}, was removed`,
data: [],
};
};
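Note: the new version delegates the raw TCP work to scanner.scan from globalUtils/scannerConnect. Based on the inline code it replaced, a helper along these lines would frame each payload as STX + scannerId + "@" + payload + ETX and wait for the scanner's reply; this is a sketch under that assumption, not the repo's actual implementation:

import net from "net";

// Sketch of a scan helper matching the old STX/ETX framing; host, port and
// scannerId would come from settings/env in the real implementation.
const STX = "\x02";
const ETX = "\x03";

export const scan = (
  host: string,
  port: number,
  scannerId: string,
  payload: string,
) =>
  new Promise<string>((resolve, reject) => {
    const socket = new net.Socket();
    socket.connect(port, host, () => {
      socket.write(`${STX}${scannerId}@${payload}${ETX}`);
    });
    socket.once("data", (data) => {
      socket.end();
      resolve(data.toString());
    });
    socket.once("error", (err) => {
      socket.destroy();
      reject(err);
    });
  });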

View File

@@ -0,0 +1,96 @@
import XLSX from "xlsx";
import { db } from "../../../../../../../database/dbclient.js";
import { settings } from "../../../../../../../database/schema/settings.js";
import { delay } from "../../../../../../globalUtils/delay.js";
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
import { postForecast } from "../postForecast.js";
export const abbottForecast = async (sheet: any, user: any) => {
const customerId = 8;
const posting: any = [];
const { data: s, error: e } = await tryCatch(db.select().from(settings));
if (e) {
return {
success: false,
message: `Error getting settings`,
data: e,
};
}
const plantToken = s.filter((s) => s.name === "plantToken");
const customHeaders = [
"date",
"time",
"newton8oz",
"newton10oz",
"E",
"F",
"fDate",
"f8ozqty",
"I",
"J",
"K",
"L",
"M",
"f10ozqty",
];
const forecastData = XLSX.utils.sheet_to_json(sheet, {
range: 5, // Start at row 5 (index 4)
header: customHeaders,
defval: "", // Default value for empty cells
});
for (let i = 1; i < forecastData.length; i++) {
const row: any = forecastData[i];
//console.log(row);
//if (row.fDate == undefined) continue;
if (row.fDate !== "") {
const date = isNaN(row.fDate)
? new Date(row.fDate)
: excelDateStuff(row.fDate);
// for 8oz do
if (row.f8ozqty > 0) {
posting.push({
customerArticleNo: "45300DA",
quantity: row.f8ozqty,
requirementDate: date,
});
}
if (row.f10ozqty > 0) {
posting.push({
customerArticleNo: "43836DA",
quantity: row.f10ozqty,
requirementDate: date,
});
}
}
}
// the predefined data that will never change
const predefinedObject = {
receivingPlantId: plantToken[0].value,
documentName: `ForecastFromLST-${new Date(Date.now()).toLocaleString(
"en-US",
)}`,
sender: user.username || "lst-system",
customerId: customerId,
positions: [],
};
// add the new forecast to the predefined data
let updatedPredefinedObject = {
...predefinedObject,
positions: [...predefinedObject.positions, ...posting],
};
const forecast: any = await postForecast(updatedPredefinedObject, user);
return {
success: forecast.success,
message: forecast.message,
data: forecast.data,
};
};
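Note: the sheet stores dates either as text or as Excel serial numbers, hence the isNaN check before calling excelDateStuff. Excel serials count days from 1899-12-30, so a conversion along these lines is presumably what excelDateStuff does (a sketch, not the repo's implementation):

// Sketch: convert an Excel serial date (days since 1899-12-30) to a JS Date.
const excelSerialToDate = (serial: number): Date => {
  const excelEpoch = Date.UTC(1899, 11, 30); // 1899-12-30
  return new Date(excelEpoch + serial * 24 * 60 * 60 * 1000);
};

console.log(excelSerialToDate(45658).toISOString().slice(0, 10)); // "2025-01-01"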

View File

@@ -1,95 +1,102 @@
import { addDays } from "date-fns";
import XLSX from "xlsx";
import { db } from "../../../../../../../database/dbclient.js";
import { settings } from "../../../../../../../database/schema/settings.js";
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
import XLSX from "xlsx";
import { postForecast } from "../postForecast.js";
import { createLog } from "../../../../../logger/logger.js";
import { sendEmail } from "../../../../../notifications/controller/sendMail.js";
import { query } from "../../../../../sqlServer/prodSqlServer.js";
import { activeArticle } from "../../../../../sqlServer/querys/dataMart/article.js";
import { addDays } from "date-fns";
import { sendEmail } from "../../../../../notifications/controller/sendMail.js";
import { createLog } from "../../../../../logger/logger.js";
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
import { postForecast } from "../postForecast.js";
export const energizerForecast = async (data: any, user: any) => {
/**
* Post a standard forecast based on the standard template.
*/
/**
* Post a standard forecast based on the standard template.
*/
const { data: s, error: e } = await tryCatch(db.select().from(settings));
const { data: s, error: e } = await tryCatch(db.select().from(settings));
if (e) {
return {
success: false,
message: `Error getting settings`,
data: e,
};
}
if (e) {
return {
success: false,
message: `Error getting settings`,
data: e,
};
}
const plantToken = s.filter((s) => s.name === "plantToken");
const plantToken = s.filter((s) => s.name === "plantToken");
const arrayBuffer = await data.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
const arrayBuffer = await data.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
const workbook = XLSX.read(buffer, { type: "buffer" });
const workbook = XLSX.read(buffer, { type: "buffer" });
const sheet: any = workbook.Sheets["Sheet1"];
const range = XLSX.utils.decode_range(sheet["!ref"]);
const sheet: any = workbook.Sheets["Sheet1"];
const range = XLSX.utils.decode_range(sheet["!ref"]);
const headers = [
"CustomerArticleNumber",
"Quantity",
"RequirementDate",
"CustomerID",
];
const headers = [
"CustomerArticleNumber",
"Quantity",
"RequirementDate",
"CustomerID",
];
// formatting the data
const rows = XLSX.utils.sheet_to_json(sheet, { header: 1 }) as any;
// formatting the data
const rows = XLSX.utils.sheet_to_json(sheet, { header: 1 }) as any;
const posting: any = [];
const customerId = 44;
const posting: any = [];
const customerId = 44;
for (let i = 1; i < rows.length; i++) {
const row: any = rows[i];
const material = row[0];
for (let i = 1; i < rows.length; i++) {
const row: any = rows[i];
const material = row[0];
if (material == undefined) continue;
for (let j = 1; j < row.length; j++) {
const qty = row[j];
if (material == undefined) continue;
for (let j = 1; j < row.length; j++) {
const qty = row[j];
if (qty && qty !== 0) {
const requirementDate = rows[0][j]; // first row is dates
if (qty && qty > 0) {
const requirementDate = rows[0][j]; // first row is dates
const date = isNaN(requirementDate)
? new Date(requirementDate)
: excelDateStuff(requirementDate);
posting.push({
customerArticleNo: material,
quantity: qty,
requirementDate: new Date(requirementDate),
});
}
}
}
console.log(isNaN(requirementDate), requirementDate, date);
posting.push({
customerArticleNo: material,
quantity: qty,
requirementDate: date,
});
}
}
}
// the predefined data that will never change
const predefinedObject = {
receivingPlantId: plantToken[0].value,
documentName: `ForecastFromLST-${new Date(Date.now()).toLocaleString(
"en-US"
)}`,
sender: user.username || "lst-system",
customerId: customerId,
positions: [],
};
//console.log(posting);
// add the new forecast to the predefined data
let updatedPredefinedObject = {
...predefinedObject,
positions: [...predefinedObject.positions, ...posting],
};
// the predefined data that will never change
const predefinedObject = {
receivingPlantId: plantToken[0].value,
documentName: `ForecastFromLST-${new Date(Date.now()).toLocaleString(
"en-US",
)}`,
sender: user.username || "lst-system",
customerId: customerId,
positions: [],
};
//post it
const forecastData: any = await postForecast(updatedPredefinedObject, user);
// add the new forecast to the predefined data
let updatedPredefinedObject = {
...predefinedObject,
positions: [...predefinedObject.positions, ...posting],
};
return {
success: forecastData.success,
message: forecastData.message,
data: forecastData.data,
};
//post it
const forecastData: any = await postForecast(updatedPredefinedObject, user);
return {
success: forecastData.success,
message: forecastData.message,
data: forecastData.data,
};
};

View File

@@ -1,12 +1,14 @@
import { addDays, addHours, isAfter, parse, subDays } from "date-fns";
import { format } from "date-fns-tz";
import XLSX from "xlsx";
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
import { db } from "../../../../../../../database/dbclient.js";
import { settings } from "../../../../../../../database/schema/settings.js";
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
import { query } from "../../../../../sqlServer/prodSqlServer.js";
import { bulkOrderArticleInfo } from "../../../../../sqlServer/querys/dm/bulkOrderArticleInfo.js";
import { addDays, addHours, isAfter, parse } from "date-fns";
import { orderState } from "../../../../../sqlServer/querys/dm/orderState.js";
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
import { abbottForecast } from "../../forecast/mappings/abbott.js";
import { postOrders } from "../postOrders.js";
// customer/article stuff will be in basis once we move to Iowa
@@ -14,171 +16,181 @@ let customerID = 8;
let invoiceID = 9;
let articles = "118,120";
export const abbottOrders = async (data: any, user: any) => {
/**
* Standard orders, meaning we get the standard file exported, fill it out, and upload it to LST.
*/
/**
* Standard orders, meaning we get the standard file exported, fill it out, and upload it to LST.
*/
const { data: s, error: e } = await tryCatch(db.select().from(settings));
const { data: s, error: e } = await tryCatch(db.select().from(settings));
if (e) {
return {
success: false,
message: `Error getting settings`,
data: e,
};
}
if (e) {
return {
success: false,
message: `Error getting settings`,
data: e,
};
}
// articleInfo
const { data: article, error: ae } = await tryCatch(
query(
bulkOrderArticleInfo.replace("[articles]", articles),
"Get Article data for bulk orders"
)
);
const a: any = article?.data;
if (ae) {
return {
success: false,
message: `Error getting article data`,
data: ae,
};
}
// articleInfo
const { data: article, error: ae } = await tryCatch(
query(
bulkOrderArticleInfo.replace("[articles]", articles),
"Get Article data for bulk orders",
),
);
const a: any = article?.data;
if (ae) {
return {
success: false,
message: `Error getting article data`,
data: ae,
};
}
// order state
const { data: o, error: oe } = await tryCatch(
query(orderState, "Gets the next 500 orders that have not been started")
);
// order state
const { data: o, error: oe } = await tryCatch(
query(orderState, "Gets the next 500 orders that have not been started"),
);
const openOrders: any = o?.data;
const openOrders: any = o?.data;
if (oe) {
return {
success: false,
message: `Error getting article data`,
data: oe,
};
}
if (oe) {
return {
success: false,
message: `Error getting article data`,
data: oe,
};
}
const plantToken = s.filter((s) => s.name === "plantToken");
const plantToken = s.filter((s) => s.name === "plantToken");
const arrayBuffer = await data.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
const arrayBuffer = await data.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
const workbook = XLSX.read(buffer, { type: "buffer" });
const workbook = XLSX.read(buffer, { type: "buffer" });
const sheetName = workbook.SheetNames[0];
const sheet = workbook.Sheets[sheetName];
const sheetName = workbook.SheetNames[0];
const sheet = workbook.Sheets[sheetName];
// Define custom headers
const customHeaders = ["date", "time", "newton8oz", "newton10oz"];
const orderData = XLSX.utils.sheet_to_json(sheet, {
range: 5, // Start at row 5 (index 4)
header: customHeaders,
defval: "", // Default value for empty cells
});
abbottForecast(sheet, user);
// Define custom headers
const customHeaders = ["date", "time", "newton8oz", "newton10oz"];
const orderData = XLSX.utils.sheet_to_json(sheet, {
range: 5, // Start at row 5 (index 4)
header: customHeaders,
defval: "", // Default value for empty cells
});
// the base of the import
const predefinedObject = {
receivingPlantId: plantToken[0].value,
documentName: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
"en-US"
)}`,
sender: user.username || "lst-system",
externalRefNo: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
"en-US"
)}`,
orders: [],
};
const oOrders: any = openOrders;
// the base of the import
const predefinedObject = {
receivingPlantId: plantToken[0].value,
documentName: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
"en-US",
)}`,
sender: user.username || "lst-system",
externalRefNo: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
"en-US",
)}`,
orders: [],
};
const oOrders: any = openOrders;
//console.log(orderData);
let correctedOrders: any = orderData
.filter(
(o: any) =>
(o.newton8oz && o.newton8oz.trim() !== "") ||
(o.newton10oz && o.newton10oz.trim() !== "")
)
.map((o: any) => ({
date: excelDateStuff(o.date, o.time),
po:
o.newton8oz.replace(/\s+/g, "") !== ""
? o.newton8oz.replace(/\s+/g, "")
: o.newton10oz.replace(/\s+/g, ""),
customerArticlenumber:
o.newton8oz != ""
? a.filter((a: any) => a.av === 118)[0]
.CustomerArticleNumber
: a.filter((a: any) => a.av === 120)[0]
.CustomerArticleNumber,
qty:
o.newton8oz != ""
? a.filter((a: any) => a.av === 118)[0].totalTruckLoad
: a.filter((a: any) => a.av === 120)[0].totalTruckLoad,
}));
function trimAll(str: string) {
return str.replace(/\s+/g, "");
}
let correctedOrders: any = orderData
.filter(
(o: any) =>
(o.newton8oz && o.newton8oz.trim() !== "") ||
(o.newton10oz && o.newton10oz.trim() !== ""),
)
.map((o: any) => ({
date: excelDateStuff(o.date, o.time),
po:
trimAll(o.newton8oz) !== ""
? trimAll(o.newton8oz)
: o.newton10oz.replace(/[\s\u00A0]+/g, ""),
customerArticlenumber:
o.newton8oz != ""
? a.filter((a: any) => a.av === 118)[0].CustomerArticleNumber
: a.filter((a: any) => a.av === 120)[0].CustomerArticleNumber,
qty:
o.newton8oz != ""
? a.filter((a: any) => a.av === 118)[0].totalTruckLoad
: a.filter((a: any) => a.av === 120)[0].totalTruckLoad,
}));
// now we want to make sure we only keep orders that are after now
correctedOrders = correctedOrders.filter((o: any) => {
const parsedDate = parse(o.date, "M/d/yyyy, h:mm:ss a", new Date());
return isAfter(o.date, new Date().toISOString());
});
//console.log(correctedOrders);
// now we want to make sure we only keep orders that are after now
correctedOrders = correctedOrders.filter((o: any) => {
const parsedDate = parse(o.date, "M/d/yyyy, h:mm:ss a", new Date());
return isAfter(new Date(o.date), new Date().toISOString());
});
//console.log(correctedOrders);
// last map to remove orders that have already been started
// correctedOrders = correctedOrders.filter((oo: any) =>
// oOrders.some((o: any) => o.CustomerOrderNumber === oo.po)
// );
let postedOrders: any = [];
const filterOrders: any = correctedOrders;
// last map to remove orders that have already been started
// correctedOrders = correctedOrders.filter((oo: any) =>
// oOrders.some((o: any) => o.CustomerOrderNumber === oo.po)
// );
let postedOrders: any = [];
const filterOrders: any = correctedOrders;
filterOrders.forEach((oo: any) => {
const isMatch = openOrders.some(
(o: any) => String(o.po).trim() === String(oo.po).trim()
);
if (!isMatch) {
//console.log(`ok to update: ${oo.po}`);
//console.log(filterOrders);
// oo = {
// ...oo,
// CustomerOrderNumber: oo.CustomerOrderNumber.replace(" ", ""),
// };
postedOrders.push(oo);
} else {
// console.log(`Not valid order to update: ${oo.po}`);
//console.log(oo)
}
});
filterOrders.forEach((oo: any) => {
const isMatch = openOrders.some(
(o: any) => String(o.po).trim() === String(oo.po).trim(),
);
//console.log(isMatch, oo.po);
if (!isMatch) {
console.log(`ok to update: ${oo.po}`);
// Map Excel data to predefinedObject format
const orders = filterOrders.map((o: any) => {
return {
customerId: customerID,
invoiceAddressId: invoiceID,
customerOrderNo: o.po,
orderDate: new Date(Date.now()).toLocaleString("en-US"),
positions: [
{
deliveryAddressId: 8,
customerArticleNo: o.customerArticlenumber,
quantity: o.qty,
deliveryDate: addHours(addDays(o.date, 1), 1), // added so we can overcome the constant one-day-behind issue as a workaround
customerLineItemNo: 1, // this is how it is currently sent over from abbott
customerReleaseNo: 1, // same as above
},
],
};
});
// oo = {
// ...oo,
// CustomerOrderNumber: oo.CustomerOrderNumber.replace(" ", ""),
// };
postedOrders.push(oo);
} else {
//console.log(`Not valid order to update: ${oo.po}`);
//console.log(oo)
}
});
// combine it all together.
const updatedPredefinedObject = {
...predefinedObject,
orders: [...predefinedObject.orders, ...orders],
};
// Map Excel data to predefinedObject format
const orders = filterOrders.map((o: any) => {
//console.log(o.po, " ", o.date, format(o.date, "M/d/yyyy HH:mm"));
return {
customerId: customerID,
invoiceAddressId: invoiceID,
customerOrderNo: o.po,
orderDate: new Date(Date.now()).toLocaleString("en-US"),
positions: [
{
deliveryAddressId: 8,
customerArticleNo: o.customerArticlenumber,
quantity: o.qty,
deliveryDate: format(o.date, "M/d/yyyy HH:mm"), // addHours(format(o.date, "M/d/yyyy HH:mm"), 1), //addHours(addDays(o.date, 1), 1), // added so we can overcome the constant one-day-behind issue as a workaround
customerLineItemNo: 1, // this is how it is currently sent over from abbott
customerReleaseNo: 1, // same as above
},
],
};
});
//console.log(updatedPredefinedObject);
// post the orders to the server
const posting = await postOrders(updatedPredefinedObject, user);
//console.log(posting);
//console.log(orders);
// combine it all together.
const updatedPredefinedObject = {
...predefinedObject,
orders: [...predefinedObject.orders, ...orders],
};
return {
success: posting?.success,
message: posting?.message,
data: posting,
};
//console.log(updatedPredefinedObject);
// post the orders to the server
const posting = await postOrders(updatedPredefinedObject, user);
//console.log(posting);
return {
success: posting?.success,
message: posting?.message,
data: posting,
};
};
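Note: the "only future orders" filter above parses the date string but then compares the raw value against an ISO string, leaving parsedDate unused; the intent reads more cleanly with the parsed date compared to the current time. A sketch using the same date-fns helpers (an illustration, not a change to the repo):

import { isAfter, parse } from "date-fns";

// Sketch of the intended filter: keep only orders dated after now, using the
// locale string produced by excelDateStuff ("M/d/yyyy, h:mm:ss a").
const onlyFutureOrders = (orders: { date: string }[]) =>
  orders.filter((o) => {
    const parsedDate = parse(o.date, "M/d/yyyy, h:mm:ss a", new Date());
    return isAfter(parsedDate, new Date());
  });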

View File

@@ -1,172 +1,172 @@
import XLSX from "xlsx";
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
import { db } from "../../../../../../../database/dbclient.js";
import { settings } from "../../../../../../../database/schema/settings.js";
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
import { query } from "../../../../../sqlServer/prodSqlServer.js";
import { invoiceAddress } from "../../../../../sqlServer/querys/dm/invoiceAddress.js";
import { orderState } from "../../../../../sqlServer/querys/dm/orderState.js";
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
import { invoiceAddress } from "../../../../../sqlServer/querys/dm/invoiceAddress.js";
import { postOrders } from "../postOrders.js";
export const energizerOrders = async (data: any, user: any) => {
/**
* Standard orders, meaning we get the standard file exported, fill it out, and upload it to LST.
*/
/**
* Standard orders, meaning we get the standard file exported, fill it out, and upload it to LST.
*/
const { data: s, error: e } = await tryCatch(db.select().from(settings));
const { data: s, error: e } = await tryCatch(db.select().from(settings));
if (e) {
return {
success: false,
message: `Error getting settings`,
data: e,
};
}
if (e) {
return {
success: false,
message: `Error getting settings`,
data: e,
};
}
// order state
const { data: o, error: oe } = await tryCatch(
query(orderState, "Gets the next 500 orders that have not been started")
);
// order state
const { data: o, error: oe } = await tryCatch(
query(orderState, "Gets the next 500 orders that have not been started"),
);
const openOrders: any = o?.data;
const openOrders: any = o?.data;
if (oe) {
return {
success: false,
message: `Error getting article data`,
data: oe,
};
}
if (oe) {
return {
success: false,
message: `Error getting article data`,
data: oe,
};
}
// order state
const { data: invoice, error: ie } = await tryCatch(
query(invoiceAddress, "Gets invoices addresses")
);
const i: any = invoice?.data;
// order state
const { data: invoice, error: ie } = await tryCatch(
query(invoiceAddress, "Gets invoices addresses"),
);
const i: any = invoice?.data;
if (ie) {
return {
success: false,
message: `Error getting invoice address data`,
data: ie,
};
}
const plantToken = s.filter((s) => s.name === "plantToken");
if (ie) {
return {
success: false,
message: `Error getting invoice address data`,
data: ie,
};
}
const plantToken = s.filter((s) => s.name === "plantToken");
const arrayBuffer = await data.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
const arrayBuffer = await data.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
const workbook = XLSX.read(buffer, { type: "buffer" });
const workbook = XLSX.read(buffer, { type: "buffer" });
const sheetName = workbook.SheetNames[0];
const sheet = workbook.Sheets[sheetName];
const sheetName = workbook.SheetNames[0];
const sheet = workbook.Sheets[sheetName];
// define custom headers
const headers = [
"ITEM",
"PO",
"ReleaseNo",
"QTY",
"DELDATE",
"COMMENTS",
"What changed",
"CUSTOMERID",
"Remark",
];
const orderData = XLSX.utils.sheet_to_json(sheet, {
defval: "",
header: headers,
range: 1,
});
// define custom headers
const headers = [
"ITEM",
"PO",
"ReleaseNo",
"QTY",
"DELDATE",
"COMMENTS",
"What changed",
"CUSTOMERID",
"Remark",
];
const orderData = XLSX.utils.sheet_to_json(sheet, {
defval: "",
header: headers,
range: 1,
});
// the base of the import
const predefinedObject = {
receivingPlantId: plantToken[0].value,
documentName: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
"en-US"
)}`,
sender: user.username || "lst-system",
externalRefNo: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
"en-US"
)}`,
orders: [],
};
// the base of the import
const predefinedObject = {
receivingPlantId: plantToken[0].value,
documentName: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
"en-US",
)}`,
sender: user.username || "lst-system",
externalRefNo: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
"en-US",
)}`,
orders: [],
};
let newOrders: any = orderData;
let newOrders: any = orderData;
// filter out the orders that have already been started just to reduce the risk of errors.
newOrders.filter((oo: any) =>
openOrders.some(
(o: any) => o.CustomerOrderNumber === oo.CustomerOrderNumber
)
);
// filter out the orders that have already been started just to reduce the risk of errors.
newOrders.filter((oo: any) =>
openOrders.some(
(o: any) => o.CustomerOrderNumber === oo.CustomerOrderNumber,
),
);
// filter out the blanks
newOrders = newOrders.filter((z: any) => z.ITEM !== "");
// filter out the blanks
newOrders = newOrders.filter((z: any) => z.ITEM !== "");
// let postedOrders: any = [];
// for (const [customerID, orders] of Object.entries(orderData)) {
// // console.log(`Running for Customer ID: ${customerID}`);
// const newOrders: any = orderData;
// let postedOrders: any = [];
// for (const [customerID, orders] of Object.entries(orderData)) {
// // console.log(`Running for Customer ID: ${customerID}`);
// const newOrders: any = orderData;
// // filter out the orders that have already been started just to reduce the risk of errors.
// newOrders.filter((oo: any) =>
// openOrders.some(
// (o: any) => o.CustomerOrderNumber === oo.CustomerOrderNumber
// )
// );
// // filter out the orders that have already been started just to reduce the risk of errors.
// newOrders.filter((oo: any) =>
// openOrders.some(
// (o: any) => o.CustomerOrderNumber === oo.CustomerOrderNumber
// )
// );
// // map everything out for each order
const nOrder = newOrders.map((o: any) => {
const invoice = i.filter(
(i: any) => i.deliveryAddress === parseInt(o.CUSTOMERID)
);
if (invoice.length === 0) {
return;
}
return {
customerId: parseInt(o.CUSTOMERID),
invoiceAddressId: invoice[0].invoiceAddress, // matched to the default invoice address
customerOrderNo: o.PO,
orderDate: new Date(Date.now()).toLocaleString("en-US"),
positions: [
{
deliveryAddressId: parseInt(o.CUSTOMERID),
customerArticleNo: o.ITEM,
quantity: parseInt(o.QTY),
deliveryDate: o.DELDATE, //excelDateStuff(o.DELDATE),
customerLineItemNo: o.ReleaseNo, // this is how it is currently sent over from abbott
customerReleaseNo: o.ReleaseNo, // same as above
remark: o.remark === "" ? null : o.remark,
},
],
};
});
// // map everything out for each order
const nOrder = newOrders.map((o: any) => {
const invoice = i.filter(
(i: any) => i.deliveryAddress === parseInt(o.CUSTOMERID),
);
if (invoice.length === 0) {
return;
}
return {
customerId: parseInt(o.CUSTOMERID),
invoiceAddressId: invoice[0].invoiceAddress, // matched to the default invoice address
customerOrderNo: o.PO,
orderDate: new Date(Date.now()).toLocaleString("en-US"),
positions: [
{
deliveryAddressId: parseInt(o.CUSTOMERID),
customerArticleNo: o.ITEM,
quantity: parseInt(o.QTY),
deliveryDate: o.DELDATE, //excelDateStuff(o.DELDATE),
customerLineItemNo: o.ReleaseNo, // this is how it is currently sent over from abbott
customerReleaseNo: o.ReleaseNo, // same as above
remark: o.COMMENTS === "" ? null : o.COMMENTS,
},
],
};
});
// // do that fun combining thing
const updatedPredefinedObject = {
...predefinedObject,
orders: [...predefinedObject.orders, ...nOrder],
};
// // do that fun combining thing
const updatedPredefinedObject = {
...predefinedObject,
orders: [...predefinedObject.orders, ...nOrder],
};
// //console.log(updatedPredefinedObject);
// //console.log(updatedPredefinedObject);
// // post the orders to the server
const posting: any = await postOrders(updatedPredefinedObject, user);
// // post the orders to the server
const posting: any = await postOrders(updatedPredefinedObject, user);
return {
customer: nOrder[0].CUSTOMERID,
//totalOrders: orders?.length(),
success: posting.success,
message: posting.message,
data: posting.data,
};
// }
return {
customer: nOrder[0].CUSTOMERID,
//totalOrders: orders?.length(),
success: posting.success,
message: posting.message,
data: posting.data,
};
// }
// return {
// success: true,
// message:
// "Standard Template was just processed successfully, please check AlplaProd 2.0 to confirm no errors. ",
// data: nOrder,
// };
// return {
// success: true,
// message:
// "Standard Template was just processed successfully, please check AlplaProd 2.0 to confirm no errors. ",
// data: nOrder,
// };
};
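Note: the custom headers array plus range: 1 is what maps the spreadsheet columns onto the ITEM/PO/QTY keys used in the mapping above. A minimal standalone sketch of that parsing step; the in-memory rows and values are made up for illustration and are not part of this diff:
import XLSX from "xlsx";
// Build a tiny in-memory sheet and read it back the same way energizerOrders does.
const rows = [
["Item", "PO", "Release", "Qty", "Del Date", "Comments", "Changed", "Cust", "Remark"], // real header row, skipped by range: 1
["BOTTLE-12", "PO-1001", "5", 5000, 45900, "", "", "1234", ""],
];
const sheet = XLSX.utils.aoa_to_sheet(rows);
const headers = ["ITEM", "PO", "ReleaseNo", "QTY", "DELDATE", "COMMENTS", "What changed", "CUSTOMERID", "Remark"];
const parsed = XLSX.utils.sheet_to_json(sheet, { defval: "", header: headers, range: 1 });
console.log(parsed[0]); // { ITEM: "BOTTLE-12", PO: "PO-1001", ReleaseNo: "5", QTY: 5000, DELDATE: 45900, ... }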

View File

@@ -0,0 +1,166 @@
import XLSX from "xlsx";
import { db } from "../../../../../../../database/dbclient.js";
import { settings } from "../../../../../../../database/schema/settings.js";
import { tryCatch } from "../../../../../../globalUtils/tryCatch.js";
import { query } from "../../../../../sqlServer/prodSqlServer.js";
import { invoiceAddress } from "../../../../../sqlServer/querys/dm/invoiceAddress.js";
import { orderState } from "../../../../../sqlServer/querys/dm/orderState.js";
import { excelDateStuff } from "../../../../utils/excelDateStuff.js";
import { postOrders } from "../postOrders.js";
export const scjOrders = async (data: any, user: any) => {
/**
* Standard orders meaning that we get the standard file exported and fill it out and upload to lst.
*/
const customerID = 48;
const { data: s, error: e } = await tryCatch(db.select().from(settings));
if (e) {
return {
success: false,
message: `Error getting settings`,
data: e,
};
}
// order state
const { data: o, error: oe } = await tryCatch(
query(orderState, "Gets the next 500 orders that have not been started"),
);
const openOrders: any = o?.data;
if (oe) {
return {
success: false,
message: `Error getting order state data`,
data: oe,
};
}
// order state
const { data: invoice, error: ie } = await tryCatch(
query(invoiceAddress, "Gets invoices addresses"),
);
const i: any = invoice?.data;
if (ie) {
return {
success: false,
message: `Error getting invoice address data`,
data: ie,
};
}
const plantToken = s.filter((s) => s.name === "plantToken");
const arrayBuffer = await data.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
const workbook = XLSX.read(buffer, { type: "buffer" });
const sheetName: any = workbook.Sheets["Sheet1"];
const sheet = XLSX.utils.decode_range(sheetName["!ref"]);
// define custom headers
const headers = [
"ItemNo",
"Description",
"DeliveryDate",
"Quantity",
"PO",
"Releases",
"remarks",
];
const orderData = XLSX.utils.sheet_to_json(sheetName, {
defval: "",
header: headers,
range: 1,
});
// the base of the import
const predefinedObject = {
receivingPlantId: plantToken[0].value,
documentName: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
"en-US",
)}`,
sender: user.username || "lst-system",
externalRefNo: `OrdersFromLST-${new Date(Date.now()).toLocaleString(
"en-US",
)}`,
orders: [],
};
let newOrders: any = orderData;
// filter out the orders that have already been started just to reduce the risk of errors.
newOrders.filter((oo: any) =>
openOrders.some(
(o: any) => o.CustomerOrderNumber === oo.CustomerOrderNumber,
),
);
// filter out the blanks
newOrders = newOrders.filter((z: any) => z.ItemNo !== "");
const nOrder = newOrders.map((o: any) => {
const invoice = i.filter((i: any) => i.deliveryAddress === customerID);
if (invoice.length === 0) {
return;
}
if (o.Releases === "") {
return;
}
if (o.PO === "") {
return;
}
const date = isNaN(o.DeliveryDate)
? new Date(o.DeliveryDate)
: excelDateStuff(o.DeliveryDate);
return {
customerId: customerID,
invoiceAddressId: invoice[0].invoiceAddress, // matched to the default invoice address
customerOrderNo: o.PO,
orderDate: new Date(Date.now()).toLocaleString("en-US"),
positions: [
{
deliveryAddressId: customerID,
customerArticleNo: o.ItemNo,
quantity: parseInt(o.Quantity),
deliveryDate: date, //excelDateStuff(o.DELDATE),
customerLineItemNo: o.PO, // this is how it is currently sent over from abbott
customerReleaseNo: o.Releases, // same as above
remark: o.remarks === "" ? null : o.remarks,
},
],
};
});
//console.log(nOrder.filter((o: any) => o !== undefined));
// // do that fun combining thing
const updatedPredefinedObject = {
...predefinedObject,
orders: [
...predefinedObject.orders,
...nOrder.filter((o: any) => o !== undefined),
],
};
//console.log(updatedPredefinedObject.orders[0]);
// // post the orders to the server
const posting: any = await postOrders(updatedPredefinedObject, user);
return {
customer: customerID,
//totalOrders: orders?.length(),
success: posting.success,
message: posting.message,
data: posting.data,
};
};

View File

@@ -1,61 +1,70 @@
import { abbottOrders } from "./mappings/abbottTruckList.js";
import { energizerOrders } from "./mappings/energizerOrdersIn.js";
import { macroImportOrders } from "./mappings/macroImport.js";
import { scjOrders } from "./mappings/scj.js";
import { standardOrders } from "./mappings/standardOrders.js";
export const ordersIn = async (data: any, user: any) => {
/**
* Bulk orders in, and custom file parsing.
*/
/**
* Bulk orders in, and custom file parsing.
*/
let success = true;
let message = "";
let orderData: any = [];
let success = true;
let message = "";
let orderData: any = [];
// what type of order are we dealing with?
if (data["fileType"] === "standard") {
// run the standard orders in
const standard = await standardOrders(data["postOrders"], user);
success = standard.success ?? false;
message = standard.message ?? "Error posting Standard Orders";
orderData = standard.data;
}
// what type of order are we dealing with?
if (data["fileType"] === "standard") {
// run the standard orders in
const standard = await standardOrders(data["postOrders"], user);
success = standard.success ?? false;
message = standard.message ?? "Error posting Standard Orders";
orderData = standard.data;
}
if (data["fileType"] === "abbott") {
// orders in
const abbott = await abbottOrders(data["postOrders"], user);
success = abbott.success ?? false;
message = abbott.message ?? "Error posting Abbott Orders";
orderData = abbott.data;
}
if (data["fileType"] === "abbott") {
// orders in
const abbott = await abbottOrders(data["postOrders"], user);
success = abbott.success ?? false;
message = abbott.message ?? "Error posting Abbott Orders";
orderData = abbott.data;
}
if (data["fileType"] === "energizer") {
// orders in
const energizer = await energizerOrders(data["postOrders"], user);
success = energizer.success ?? false;
message = energizer.message ?? "Error posting Energizer Orders";
orderData = energizer.data;
}
if (data["fileType"] === "energizer") {
// orders in
const energizer = await energizerOrders(data["postOrders"], user);
success = energizer.success ?? false;
message = energizer.message ?? "Error posting Energizer Orders";
orderData = energizer.data;
}
if (data["fileType"] === "loreal") {
// orders in
}
if (data["fileType"] === "loreal") {
// orders in
}
if (data["fileType"] === "pg") {
// orders in
}
if (data["fileType"] === "pg") {
// orders in
}
if (data["fileType"] === "macro") {
// orders in
const macro = await macroImportOrders(data["postOrders"], user);
success = macro.success ?? false;
message = macro.message ?? "Error posting Macro Orders";
orderData = macro.data;
}
if (data["fileType"] === "macro") {
// orders in
const macro = await macroImportOrders(data["postOrders"], user);
success = macro.success ?? false;
message = macro.message ?? "Error posting Macro Orders";
orderData = macro.data;
}
return {
success,
message,
data: orderData,
};
if (data["fileType"] === "scj") {
// orders in
const macro = await scjOrders(data["postOrders"], user);
success = macro.success ?? false;
message = macro.message ?? "Error posting Macro Orders";
orderData = macro.data;
}
return {
success,
message,
data: orderData,
};
};
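Note: a hypothetical caller of the dispatcher above. The fileType and postOrders field names come straight from the handler; the file variable and the username are assumptions for illustration only:
declare const file: File; // the uploaded workbook from the route; assumed, not shown in this diff
const result = await ordersIn({ fileType: "scj", postOrders: file }, { username: "jdoe" });
if (!result.success) {
console.error(result.message, result.data);
}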

View File

@@ -1,78 +1,78 @@
import axios from "axios";
import { labelData } from "../../../sqlServer/querys/materialHelpers/labelInfo.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { createLog } from "../../../logger/logger.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { db } from "../../../../../database/dbclient.js";
import { commandLog } from "../../../../../database/schema/commandLog.js";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { labelData } from "../../../sqlServer/querys/materialHelpers/labelInfo.js";
import { labelInfo } from "../../../sqlServer/querys/warehouse/labelInfo.js";
type Data = {
runningNr: string;
lotNum: number;
runningNr: string;
lotNum: number;
};
export const consumeMaterial = async (data: Data) => {
const { runningNr, lotNum } = data;
// replace the rn
const { runningNr, lotNum } = data;
// replace the rn
console.log(data);
// console.log(data);
const rnReplace = labelData.replaceAll("[rn]", runningNr);
const rnReplace = labelInfo.replaceAll("[runningNr]", runningNr);
let barcode;
// get the barcode from the running number
try {
const r: any = await query(rnReplace, "labelData");
barcode = r?.data;
} catch (error) {
console.log(error);
createLog("error", "", "logistics", `Error getting barcode: ${error}`);
}
let barcode;
// get the barcode from the running number
try {
const r: any = await query(rnReplace, "labelData");
//console.log(r);
barcode = r?.data;
} catch (error) {
console.log(error);
createLog("error", "", "logistics", `Error getting barcode: ${error}`);
}
if (barcode.length === 0) {
return {
success: false,
message: "The running number you've entered not on stock.",
};
//throw Error("The provided runningNr is not in stock");
}
// create the url to post
const url = await prodEndpointCreation(
"/public/v1.0/IssueMaterial/ConsumeNonPreparedManualMaterial"
);
if (barcode.length === 0) {
return {
success: false,
message: "The running number you've entered not on stock.",
};
//throw Error("The provided runningNr is not in stock");
}
// create the url to post
const url = await prodEndpointCreation(
"/public/v1.0/IssueMaterial/ConsumeNonPreparedManualMaterial",
);
const consumeSomething = {
productionLot: lotNum,
barcode: barcode[0]?.barcode,
};
const consumeSomething = {
productionLot: lotNum,
barcode: barcode[0]?.barcode,
};
try {
const results = await axios.post(url, consumeSomething, {
headers: {
"X-API-Key": process.env.TEC_API_KEY || "",
"Content-Type": "application/json",
},
});
try {
const results = await axios.post(url, consumeSomething, {
headers: {
"X-API-Key": process.env.TEC_API_KEY || "",
"Content-Type": "application/json",
},
});
const { data: commandL, error: ce } = await tryCatch(
db.insert(commandLog).values({
commandUsed: "consumeMaterial",
bodySent: data,
})
);
return {
success: true,
message: "Material was consumed",
status: results.status,
};
} catch (error: any) {
console.log(error);
return {
success: false,
status: 200,
message: error.response?.data.errors[0].message,
};
}
const { data: commandL, error: ce } = await tryCatch(
db.insert(commandLog).values({
commandUsed: "consumeMaterial",
bodySent: data,
}),
);
return {
success: true,
message: "Material was consumed",
status: results.status,
};
} catch (error: any) {
console.log(error);
return {
success: false,
status: 200,
message: error.response?.data.errors[0].message,
};
}
};
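Note: the { data, error } destructuring used throughout these handlers comes from the shared globalUtils/tryCatch helper, which is not part of this diff. A hypothetical sketch of that pattern (the real helper may differ in details):
type Result<T> = { data: T; error: null } | { data: null; error: unknown };
// Wraps a promise and returns { data, error } so callers can branch without try/catch blocks.
export const tryCatch = async <T>(promise: Promise<T>): Promise<Result<T>> => {
try {
const data = await promise;
return { data, error: null };
} catch (error) {
return { data: null, error };
}
};
// usage mirrors the handlers above:
// const { data, error } = await tryCatch(query(labelData, "labelData"));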

View File

@@ -1,6 +1,7 @@
import axios from "axios";
import { prodEndpointCreation } from "../../../../globalUtils/createUrl.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
export const postAdjustment = async (data: any) => {
if (data.warehouseId === undefined) {
@@ -35,9 +36,30 @@ export const postAdjustment = async (data: any) => {
quantity: data.quantity,
};
let url = await prodEndpointCreation(
"/public/v1.0/Warehousing/AdjustSiloStockLevel",
);
// do we have warehousing turned on?
const { data: feature, error: featureError } = (await tryCatch(
query(
`SELECT [Id]
,[Feature]
,[Enabled]
,[ActivationDate]
FROM [test1_AlplaPROD2.0_Read].[support].[FeatureActivation] where [Feature] = 7`,
"feature switch check",
),
)) as any;
let prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
if (featureError) {
prodUrl = "/public/v1.0/Warehousing/AdjustSiloStockLevel";
}
if (feature?.data.length > 0) {
prodUrl = "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel";
}
// 1.0 "/public/v1.0/Warehousing/AdjustSiloStockLevel","
// 1.1 "/public/v1.1/Warehousing/Lane/AdjustSiloStockLevel"
let url = await prodEndpointCreation(prodUrl);
const { data: silo, error } = await tryCatch(
axios.post(url, siloAdjustment, {

View File

@@ -1,7 +1,9 @@
import { OpenAPIHono } from "@hono/zod-openapi";
import { migrateAdjustments } from "./controller/siloAdjustments/migrateAdjustments.js";
import { getLanesToCycleCount } from "./controller/warehouse/cycleCountChecks/cyclecountCheck.js";
import attachSilo from "./route/attachSilo.js";
import bookOutPallet from "./route/bookout.js";
import comsumeMaterial from "./route/consumeMaterial.js";
import detachSilo from "./route/detachSilo.js";
import postBulkOrders from "./route/dm/bulkOrdersIn.js";
@@ -16,6 +18,7 @@ import outbound from "./route/getOutbound.js";
import getPPOO from "./route/getPPOO.js";
import getConnectionType from "./route/getSiloConnectionData.js";
import getSSCC from "./route/getSSCCNumber.js";
import relocate from "./route/relocate.js";
import removeAsNonReable from "./route/removeAsNonReusable.js";
import returnMat from "./route/returnMaterial.js";
import createSiloAdjustment from "./route/siloAdjustments/createSiloAdjustment.js";
@@ -28,7 +31,7 @@ const app = new OpenAPIHono();
const routes = [
comsumeMaterial,
returnMat,
relocate,
// silo
createSiloAdjustment,
postComment,
@@ -55,6 +58,7 @@ const routes = [
// logistics
removeAsNonReable,
getSSCC,
bookOutPallet,
] as const;
// app.route("/server", modules);

View File

@@ -0,0 +1,87 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { verify } from "hono/jwt";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
//import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { bookOutPallet } from "../controller/commands/bookout.js";
const app = new OpenAPIHono();
const responseSchema = z.object({
success: z.boolean().optional().openapi({ example: true }),
message: z.string().optional().openapi({ example: "user access" }),
});
app.openapi(
createRoute({
tags: ["logistics"],
summary: "Consumes material based on its running number",
method: "post",
path: "/bookout",
//middleware: authMiddleware,
description:
"Provided a running number and lot number you can consume material.",
responses: {
200: {
content: { "application/json": { schema: responseSchema } },
description: "stopped",
},
400: {
content: { "application/json": { schema: responseSchema } },
description: "Failed to stop",
},
401: {
content: { "application/json": { schema: responseSchema } },
description: "Failed to stop",
},
},
}),
async (c) => {
const { data, error } = await tryCatch(c.req.json());
if (error) {
return c.json(
{
success: false,
message: "Missing data please try again",
error,
},
400,
);
}
apiHit(c, { endpoint: "/bookout", lastBody: data });
//const authHeader = c.req.header("Authorization");
//const token = authHeader?.split("Bearer ")[1] || "";
//const payload = await verify(token, process.env.JWT_SECRET!);
try {
//return apiReturn(c, true, access?.message, access?.data, 200);
//const pointData = { ...data, user: payload.user };
const bookout = await bookOutPallet(data);
console.log("from booout:", bookout);
return c.json(
{
success: bookout?.success,
message: bookout?.message,
data: bookout.data,
},
200,
);
} catch (error) {
console.log("from error:", error);
//return apiReturn(c, false, "Error in setting the user access", error, 400);
return c.json(
{
success: false,
message: "Missing data please try again",
error,
},
400,
);
}
},
);
export default app;

View File

@@ -0,0 +1,80 @@
import { createRoute, OpenAPIHono, z } from "@hono/zod-openapi";
import { apiHit } from "../../../globalUtils/apiHits.js";
import { tryCatch } from "../../../globalUtils/tryCatch.js";
import { authMiddleware } from "../../auth/middleware/authMiddleware.js";
import { relatePallet } from "../controller/commands/relocated.js";
const app = new OpenAPIHono();
const responseSchema = z.object({
success: z.boolean().optional().openapi({ example: true }),
message: z.string().optional().openapi({ example: "user access" }),
});
app.openapi(
createRoute({
tags: ["logistics"],
summary: "Consumes material based on its running number",
method: "post",
path: "/relocate",
//middleware: authMiddleware,
description:
"Provided a running number and lot number you can consume material.",
responses: {
200: {
content: { "application/json": { schema: responseSchema } },
description: "stopped",
},
400: {
content: { "application/json": { schema: responseSchema } },
description: "Failed to stop",
},
401: {
content: { "application/json": { schema: responseSchema } },
description: "Failed to stop",
},
},
}),
async (c) => {
const { data, error } = await tryCatch(c.req.json());
if (error) {
return c.json(
{
success: false,
message: "Missing data please try again",
error,
},
400,
);
}
apiHit(c, { endpoint: "/relocate", lastBody: data });
//const authHeader = c.req.header("Authorization");
//const token = authHeader?.split("Bearer ")[1] || "";
//const payload = await verify(token, process.env.JWT_SECRET!);
try {
//return apiReturn(c, true, access?.message, access?.data, 200);
const consume = await relatePallet(data);
console.log(consume);
return c.json(
{ success: consume?.success, message: consume?.message },
200,
);
} catch (error) {
//console.log(error);
//return apiReturn(c, false, "Error in setting the user access", error, 400);
return c.json(
{
success: false,
message: "Missing data please try again",
error,
},
400,
);
}
},
);
export default app;

View File

@@ -1,34 +1,60 @@
import { getJsDateFromExcel } from "excel-date-to-js";
export const excelDateStuff = (serial: number, time: any = 0) => {
// console.log(serial);
// add 5 hours or the offset to utc
// export const excelDateStuff = (serial: number, time?: any) => {
// // add 5 hours or the offset to utc
// get the local timezone
const localoffset = new Date().getTimezoneOffset() / 60; // then divide by 60 to get the true number;
// // get the local timezone
// const localoffset = new Date().getTimezoneOffset() / 60; // then divide by 60 to get the true number;
if (serial % 1 === 0) {
time = 800;
}
// if (!time) {
// time = 800;
// }
const addHours = serial + localoffset / 24;
//console.log(getJsDateFromExcel(addHours));
if (typeof serial !== "number" || serial <= 0) {
return "invalid Date";
}
// const addHours = serial + localoffset / 24;
// //console.log(getJsDateFromExcel(addHours));
// if (typeof serial !== "number" || serial <= 0) {
// return "invalid Date";
// }
const date = getJsDateFromExcel(addHours); // base date from Excel serial
// const date = getJsDateFromExcel(addHours); // base date from Excel serial
if (time != 0) {
// convert the time over to hour and min
const hours = Math.floor(time / 100);
const minutes = time % 100;
date.setHours(hours);
date.setMinutes(minutes);
}
//console.log(date.toLocaleString("en-US"), getJsDateFromExcel(addHours));
// if (time != 0) {
// // convert the time over to hour and min
// const hours = Math.floor(time / 100);
// const minutes = time % 100;
// date.setHours(hours);
// date.setMinutes(minutes);
// }
// //console.log(date.toLocaleString("en-US"), getJsDateFromExcel(addHours));
//console.log(serial);
//console.log(date.toISOString());
return date.toISOString(); //.toLocaleString("en-US"); // or .toISOString() if preferred
// //console.log(serial);
// console.log(date.toISOString(), serial, time);
// return date.toISOString(); //.toLocaleString("en-US"); // or .toISOString() if preferred
// };
export const excelDateStuff = (serial: number, time?: any) => {
if (typeof serial !== "number" || serial <= 0) {
return "invalid Date";
}
// Default time to 8:00 AM if not provided
if (!time) {
time = 800;
}
// Get base date from Excel serial (this gives you UTC midnight)
const date = getJsDateFromExcel(serial);
const localOffset = new Date().getTimezoneOffset() / 60;
const hours = Math.floor(time / 100);
const minutes = time % 100;
// Set the time in UTC
date.setUTCHours(hours + localOffset);
date.setUTCMinutes(minutes);
date.setUTCSeconds(0);
date.setUTCMilliseconds(0);
//console.log(date.toISOString(), serial, time);
return date.toISOString();
};
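Note: a quick worked example of the rewritten converter. The serial and times are illustrative, and the exact ISO output depends on the server's timezone offset:
const a = excelDateStuff(45900); // date-only serial, defaults to 8:00 local; roughly "2025-08-31T13:00:00.000Z" on a UTC-5 server
const b = excelDateStuff(45900, 1430); // same day at 14:30 local
const c = excelDateStuff(-1); // "invalid Date" guard for non-positive serials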

View File

@@ -4,95 +4,92 @@ import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sendEmail } from "../sendMail.js";
import { bow2incoming } from "../../../sqlServer/querys/notifications/bow2henkel.js";
import { sendEmail } from "../sendMail.js";
const notification = async (notifyData: any) => {
/**
* Pass the entire notification over
*/
createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`);
/**
* Pass the entire notification over
*/
createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`);
// validate if there are any emails.
if (notifyData.emails === "") {
createLog(
"error",
"reprinting",
"notify",
`There are no emails set for ${notifyData.name}`
);
return;
}
// validate if there are any emails.
if (notifyData.emails === "") {
createLog(
"error",
"reprinting",
"notify",
`There are no emails set for ${notifyData.name}`,
);
return;
}
//let labels: Labels[];
//let labels: Labels[];
const { data: l, error: labelError } = await tryCatch(
query(
bow2incoming.replace(
"[time]",
notifyData.notifiySettings.processTime
),
"Label Reprints"
)
);
const labels: any = l?.data as any;
if (labelError) {
createLog(
"error",
"reprinting",
"notify",
`Failed to get the labels: ${labelError}`
);
return;
}
const { data: l, error: labelError } = await tryCatch(
query(
bow2incoming.replace("[time]", notifyData.notifiySettings.processTime),
"Label Reprints",
),
);
const labels: any = l?.data as any;
if (labelError) {
createLog(
"error",
"reprinting",
"notify",
`Failed to get the labels: ${labelError}`,
);
return;
}
if (labels.length > 0) {
//send the email :D
const emailSetup = {
email: notifyData.emails,
subject: "Alert! New incoming goods has been received",
template: "bow2IncomingGoods",
context: {
items: labels,
time: notifyData.notifiySettings.processTime,
},
};
if (labels.length > 0) {
//send the email :D
const emailSetup = {
email: notifyData.emails,
subject: "Alert! New incoming goods has been received",
template: "bow2IncomingGoods",
context: {
items: labels,
time: notifyData.notifiySettings.processTime,
},
};
const sentEmail = await sendEmail(emailSetup);
const sentEmail = await sendEmail(emailSetup);
if (!sentEmail.success) {
createLog(
"error",
"reprinting",
"notify",
"Failed to send email, will try again on next interval"
);
return;
}
if (!sentEmail.success) {
createLog(
"error",
"reprinting",
"notify",
"Failed to send email, will try again on next interval",
);
return;
}
// // update the last time we ran and the prod id
// const notifUpdate = {
// prodID: labels[0].IdEtikettenHistorie,
// lastRan: nowDate(),
// };
// // update the last time we ran and the prod id
// const notifUpdate = {
// prodID: labels[0].IdEtikettenHistorie,
// lastRan: nowDate(),
// };
// update the last time ran
// update the last time ran
const { data, error } = await tryCatch(
db
.update(notifications)
.set({
lastRan: sql`NOW()`,
notifiySettings: {
...notifyData.notifiySettings,
prodID: labels[0].IdEtikettenHistorie,
},
})
.where(eq(notifications.name, notifyData.name))
);
} else {
return;
}
const { data, error } = await tryCatch(
db
.update(notifications)
.set({
lastRan: sql`NOW()`,
notifiySettings: {
...notifyData.notifiySettings,
prodID: labels[0].IdEtikettenHistorie,
},
})
.where(eq(notifications.name, notifyData.name)),
);
} else {
return;
}
};
export default notification;

View File

@@ -0,0 +1,108 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import {
type SqlQuery,
sqlQuerySelector,
} from "../../../sqlServer/utils/querySelector.utils.js";
import { sendEmail } from "../sendMail.js";
export interface Labels {
IdEtikettenHistorie?: number;
}
const notification = async (notifyData: any) => {
/**
* Pass the entire notification over
*/
createLog("debug", "reprinting", "notify", `monitoring ${notifyData.name}`);
// validate if there are any emails.
if (notifyData.emails === "") {
createLog(
"error",
"reprinting",
"notify",
`There are no emails set for ${notifyData.name}`,
);
return;
}
const cycleCountCheck = sqlQuerySelector("cycleCountCheck.query") as SqlQuery;
if (!cycleCountCheck.success) {
console.log("Failed to load the query: ", cycleCountCheck.message);
return;
}
const { data: c, error: cError } = await tryCatch(
query(
cycleCountCheck.query.replace("[timeTest]", notifyData.checkInterval),
"Cycle count check",
),
);
const cycle: any = c?.data ?? ([] as any);
//console.log(cycle);
if (cError) {
createLog(
"error",
"reprinting",
"notify",
`Failed to get the labels: ${cError}`,
);
return;
}
if (cycle.length > 0) {
//send the email :D
const emailSetup = {
email: notifyData.emails,
subject: `Alert! RowBlocked for more than ${notifyData.checkInterval} min(s)`,
template: "cycleCountCheck",
context: {
checkTime: notifyData.checkInterval,
items: cycle,
},
};
const sentEmail = await sendEmail(emailSetup);
if (!sentEmail.success) {
createLog(
"error",
"reprinting",
"notify",
"Failed to send email, will try again on next interval",
);
return;
}
// // update the last time we ran and the prod id
// const notifUpdate = {
// prodID: labels[0].IdEtikettenHistorie,
// lastRan: nowDate(),
// };
// update the last time ran
const { data, error } = await tryCatch(
db
.update(notifications)
.set({
lastRan: sql`NOW()`,
// notifiySettings: {
// ...notifyData.notifiySettings,
// prodID: labels[0].IdEtikettenHistorie,
// },
})
.where(eq(notifications.name, notifyData.name)),
);
} else {
return;
}
};
export default notification;
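Note: this notification loads its SQL from a .query file through sqlQuerySelector instead of a TS export. querySelector.utils.js itself is not part of this diff, so the following is only a hypothetical sketch of that interface; the folder name and error messages are assumptions:
import { readFileSync } from "node:fs";
import path from "node:path";
export type SqlQuery = { success: boolean; message: string; query?: string };
// Hypothetical file-based selector: returns the text of a .query file plus a success flag.
export const sqlQuerySelector = (fileName: string, dir = "./querys"): SqlQuery => {
try {
const query = readFileSync(path.join(dir, fileName), "utf8");
return { success: true, message: `loaded ${fileName}`, query };
} catch (err) {
return { success: false, message: `could not load ${fileName}: ${err}` };
}
};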

View File

@@ -1,112 +1,112 @@
import { isBefore } from "date-fns";
import { db } from "../../../../../database/dbclient.js";
import { fifoIndex } from "../../../../../database/schema/fifoIndex.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { currentInv } from "../../../sqlServer/querys/notifications/fifoIndex/currentInv.js";
import { shippedPallets } from "../../../sqlServer/querys/notifications/fifoIndex/shippedPallets.js";
import { db } from "../../../../../database/dbclient.js";
import { fifoIndex } from "../../../../../database/schema/fifoIndex.js";
export default async function fifoIndexCheck() {
/**
* getting the shipped pallets
*/
const { data: shipped, error: eShipped } = await tryCatch(
query(shippedPallets, "notify shipped pallets")
);
/**
* getting the shipped pallets
*/
const { data: shipped, error: eShipped } = await tryCatch(
query(shippedPallets, "notify shipped pallets"),
);
const { data: currentStuff, error: eCurrentInv } = await tryCatch(
query(currentInv, "notify shipped pallets")
);
const { data: currentStuff, error: eCurrentInv } = await tryCatch(
query(currentInv, "notify shipped pallets"),
);
// console.log(shipped?.data[2]);
// console.log(currentStuff?.data[2]);
// console.log(shipped?.data[2]);
// console.log(currentStuff?.data[2]);
/**
* We want to check if each shipped pallet is out of fifo
*/
const check = shipped?.data.map((n: any) => {
/**
* Returns all data so we know if we are in or out.
*/
//check if there are pallets older than the current one we are mapped on.
const fifoCheck = currentStuff?.data.filter(
(i: any) => isBefore(i.prodDate, n.prodDate) && i.av === n.av
);
//console.log(fifoCheck.length);
if (fifoCheck.length > 0) {
// console.log("Out of fifo", {
// av: n.av,
// rn: n.runningNr,
// fRn: fifoCheck[0].runningNr,
// dates: [fifoCheck[0].prodDate, n.prodDate],
// });
}
/**
* We want to check if each shipped pallet is out of fifo
*/
const check: any = shipped?.data.map((n: any) => {
/**
* Returns all data so we know if we are in or out.
*/
//check if there are pallets older than the current one we are mapped on.
const fifoCheck = currentStuff?.data.filter(
(i: any) => isBefore(i.prodDate, n.prodDate) && i.av === n.av,
) as any;
//console.log(fifoCheck.length);
if (fifoCheck.length > 0) {
// console.log("Out of fifo", {
// av: n.av,
// rn: n.runningNr,
// fRn: fifoCheck[0].runningNr,
// dates: [fifoCheck[0].prodDate, n.prodDate],
// });
}
return {
...n,
// currentInv: fifoCheck[0],
fifoFollowed: fifoCheck.length === 0 ? true : false,
};
});
return {
...n,
// currentInv: fifoCheck[0],
fifoFollowed: fifoCheck.length === 0 ? true : false,
};
});
/**
* let's see just the av that is out or in
*/
/**
* let's see just the av that is out or in
*/
const avCheck = (check: any) => {
/**
* This will only return the data based on out of fifo.
*/
// check how many times each av showed up
const avCounts = check.reduce((a: any, c: any) => {
if (c.fifoFollowed === false) {
const avValue = c.av;
a[avValue] = (a[avValue] || 0) + 1;
}
return a;
}, {});
const avCheck = (check: any) => {
/**
* This will only return the data based on out of fifo.
*/
// check how many times each av showed up
const avCounts = check.reduce((a: any, c: any) => {
if (c.fifoFollowed === false) {
const avValue = c.av;
a[avValue] = (a[avValue] || 0) + 1;
}
return a;
}, {});
// transform them back to an avCount Object
const result = Object.keys(avCounts).map((av) => ({
av: parseInt(av, 10),
count: avCounts[av],
}));
// transform them back to an avCount Object
const result = Object.keys(avCounts).map((av) => ({
av: parseInt(av, 10),
count: avCounts[av],
}));
return result;
};
return result;
};
const outOfFifo: any = avCheck(check);
const totalOut = outOfFifo.reduce((sum: any, c: any) => {
return sum + c.count;
}, 0);
const outOfFifo: any = avCheck(check);
const totalOut = outOfFifo.reduce((sum: any, c: any) => {
return sum + c.count;
}, 0);
/**
* add the data to the db
*/
for (let i = 0; i < check.length; i++) {
const { data: dbInsert, error: dbE } = await tryCatch(
db
.insert(fifoIndex)
.values({
lot: check[i].lot,
av: check[i].av,
runningNr: check[i].runningNr,
prodDate: check[i].prodDate,
fifoFollowed: check[i].fifoFollowed,
add_Date: check[i].add_Date,
})
.onConflictDoNothing()
);
}
/**
* add the data to the db
*/
for (let i = 0; i < check!.length; i++) {
const { data: dbInsert, error: dbE } = await tryCatch(
db
.insert(fifoIndex)
.values({
lot: check[i].lot,
av: check[i].av,
runningNr: check[i].runningNr,
prodDate: check[i].prodDate,
fifoFollowed: check[i].fifoFollowed,
add_Date: check[i].add_Date,
})
.onConflictDoNothing(),
);
}
return {
success: true,
message: "Fifo index data",
data: {
palletsOut: check,
totalShipped: shipped?.data.length,
inFifo: shipped?.data.length - totalOut,
outOfFifoData: outOfFifo,
},
};
return {
success: true,
message: "Fifo index data",
data: {
palletsOut: check,
totalShipped: shipped?.data.length,
inFifo: shipped!.data.length - totalOut,
outOfFifoData: outOfFifo,
},
};
}
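Note: a small worked example of the avCheck aggregation above, with made-up pallets, showing how the reduce-then-map turns per-pallet fifo flags into per-article counts:
const sample = [
{ av: 4711, runningNr: "A1", fifoFollowed: false },
{ av: 4711, runningNr: "A2", fifoFollowed: false },
{ av: 9001, runningNr: "B1", fifoFollowed: true },
];
const counts = sample.reduce((a: Record<string, number>, c) => {
if (c.fifoFollowed === false) a[c.av] = (a[c.av] || 0) + 1;
return a;
}, {});
const outOfFifo = Object.keys(counts).map((av) => ({ av: parseInt(av, 10), count: counts[av] }));
// outOfFifo -> [{ av: 4711, count: 2 }], so totalOut would be 2 and inFifo would be 1.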

View File

@@ -0,0 +1,183 @@
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { query } from "../../../sqlServer/prodSqlServer.js";
import { sqlQuerySelector } from "../../../sqlServer/utils/querySelector.utils.js";
import { sendEmail } from "../sendMail.js";
let running = false;
export default async function platToPlantEdi(notifyData: any) {
createLog("info", "plantToPlant", "notify", `monitoring ${notifyData.name}`);
if (running) {
createLog(
"info",
"plantToPlant",
"notify",
`Notification ${notifyData.name} is already running, skipping`,
);
return;
}
running = true;
const { data: noti, error: notiError } = (await tryCatch(
db
.select()
.from(notifications)
.where(eq(notifications.name, notifyData.name)),
)) as any;
if (notiError) {
createLog(
"error",
"edi",
"notify",
"Error in getting the notification data",
);
}
// get the default emails; they can be blank, as we only add these to the end of the email built from the full flow
let emails = noti[0]?.email ?? "";
const checkBol = sqlQuerySelector("checkBol.query");
if (!checkBol.success) {
createLog("error", "edi", "notify", "Error in getting the bol query data");
}
const pLinkedB = sqlQuerySelector("palletsLinkedToBol.query");
if (!pLinkedB.success) {
createLog("error", "edi", "notify", "Error in getting the bol query data");
}
let ignoreBols: string[] = noti[0]?.notifiySettings?.processedBol ?? [];
const joinBols = ignoreBols.join(",");
let updateQuery = noti[0]?.notifiySettings?.includeAll
? checkBol?.query?.replace(
"and a.bezeichnung like '%Alpla%'",
"--and a.bezeichnung like '%Alpla%'",
)
: checkBol?.query;
const { data: b, error: bError } = (await tryCatch(
query(
updateQuery
?.replace("[timeCheck]", noti[0]?.checkInterval ?? "30")
.replace("[ignoreBols]", joinBols ?? 500) ?? "",
"Check bol",
),
)) as any;
if (bError) {
return {
success: false,
message: "Error getting newly created bols",
data: bError,
};
}
const planedByBol = new Map<string, string[]>();
for (const row of b.data) {
if (!planedByBol.has(row.bol)) {
planedByBol.set(row.bol, []);
}
planedByBol.get(row.bol)!.push(String(row.idladeplanung));
}
if (b.data.length > 0) {
// loop each bol in the system and get the bols only
for (const [bolNumber, idList] of planedByBol.entries()) {
//for (const bol of b.data) {
// run the process to get the pallet numbers
const joinedIdLadeplanung = idList.join(",");
//console.log("BOL:", bolNumber);
//console.log("IDLadeplanung string:", joinedIdLadeplanung);
//console.log("IgnoreBols: ", joinBols);
// now get the pallets that are within the ladeplanung
const { data: pallets, error: pError } = await tryCatch(
query(
pLinkedB?.query?.replace(
"[palLinkedToBol]",
joinedIdLadeplanung ?? "0",
) ?? "",
"Get Pallets linked in the bol",
),
);
//console.log(pallets);
// console.log("Address: ", b.data[0].addressId ?? "0");
if (b.data[0].addressId === "") return;
ignoreBols.push(bolNumber);
if (ignoreBols.length > 15) {
ignoreBols.splice(0, ignoreBols.length - 15);
}
// get the email address.
const checkBol = sqlQuerySelector("addressInfo.query");
const { data: address, error: aError } = (await tryCatch(
query(
checkBol?.query?.replace(
"[customerAddress]",
b.data[0].addressId ?? "0",
) ?? "",
"Get Pallets linked in the bol",
),
)) as any;
if (noti[0]?.emails === "") return; // no default emails
// setup the email to be sent :D
const emailSetup = {
email: `${noti[0]?.emails};${address.data[0].email ?? ""}`,
subject: `New EDI transfer Created for BOL: ${bolNumber}`,
template: "plantToPlantEdi",
context: {
items: pallets?.data ?? [],
bol: bolNumber,
//secondarySetting: notifyData.notifiySettings,
},
};
// send the email
await sendEmail(emailSetup);
// add the bols to be ignored
await db
.update(notifications)
.set({
lastRan: sql`NOW()`,
notifiySettings: {
...noti[0]?.notifiySettings,
processedBol: ignoreBols,
},
})
.where(eq(notifications.name, notifyData.name));
}
running = false;
return {
success: true,
message: "All bols have been processed",
data: [ignoreBols],
};
}
running = false;
return {
success: true,
message: "No new bols have been created",
data: [],
};
}
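Note: the per-BOL grouping above is a plain group-by over a Map; a reduced sketch with placeholder rows to show what planedByBol ends up holding:
const sampleRows = [
{ bol: "BOL-100", idladeplanung: 1 },
{ bol: "BOL-100", idladeplanung: 2 },
{ bol: "BOL-200", idladeplanung: 7 },
];
const byBol = new Map<string, string[]>();
for (const row of sampleRows) {
if (!byBol.has(row.bol)) byBol.set(row.bol, []);
byBol.get(row.bol)!.push(String(row.idladeplanung));
}
// byBol.get("BOL-100") -> ["1", "2"], later joined with "," for the [palLinkedToBol] placeholder.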

View File

@@ -0,0 +1,118 @@
// SELECT count(*) FROM V_EtikettenGedruckt where AnzahlGedruckterKopien > 2 and CONVERT(varchar(5), Add_Date,108) not like CONVERT(varchar(5), Upd_Date,108) and Upd_Date > DATEADD(SECOND, -30,getdate()) and VpkVorschriftBez not like '%$%'
import { errorMonitor } from "node:events";
import { eq, sql } from "drizzle-orm";
import { db } from "../../../../../database/dbclient.js";
import { notifications } from "../../../../../database/schema/notifications.js";
import { settings } from "../../../../../database/schema/settings.js";
import { tryCatch } from "../../../../globalUtils/tryCatch.js";
import { createLog } from "../../../logger/logger.js";
import { sendEmail } from "../sendMail.js";
export interface DownTime {
downTimeId?: number;
machineAlias?: string;
}
export default async function tooManyErrors(notifyData: any) {
// we will override this with users that want to subscribe to this
// a new table called subAlerts would link to where the user wants it, e.g. subId: 1, userId: x, notificationId: y. then in here we look up the userId to get the email :D
// this could then leave the emails in the notification blank and let users subscribe to it.
//console.log(notifyData);
if (notifyData.emails === "") {
createLog(
"error",
"notify",
"notify",
`There are no emails set for ${notifyData.name}`,
);
return;
}
// console.log(data.secondarySetting[0].duration);
const plant = await db
.select()
.from(settings)
.where(eq(settings.name, "plantToken"));
console.log(plant[0].value);
// console.log(
// errorQuery
// .replace("[time]", notifyData.checkInterval)
// .replace("[errorCount]", notifyData.notifiySettings.errorCount),
// errorLogQuery.replace("[time]", notifyData.checkInterval),
// );
let errorLogData: any = [];
try {
const errorData = await db.execute(sql`
SELECT 'error' AS level, COUNT(*) AS error_count
FROM public.logs
WHERE level = 'error'
AND "add_Date" > now() - INTERVAL ${sql.raw(`'${notifyData.checkInterval} minutes'`)}
GROUP BY level
HAVING COUNT(*) >= ${notifyData.notifiySettings.errorCount}
`);
if (
errorData.length > 0
// && downTime[0]?.downTimeId > notifyData.notifiySettings.prodID
) {
const errorLogs = await db.execute(sql`
select* from public.logs where level = 'error' and "add_Date" > now() - INTERVAL ${sql.raw(`'${notifyData.checkInterval} minutes'`)} order by "add_Date" desc;
`);
errorLogData = errorLogs;
//send the email :D
const emailSetup = {
email: notifyData.emails,
subject: `Alert! ${plant[0].value} has encountered ${
errorLogData.length
} ${errorLogData.length > 1 ? "errors" : "error"} in the last ${notifyData.checkInterval} min`,
template: "tooManyErrors",
context: {
data: errorLogData.slice(0, 100),
count: notifyData.notifiySettings.errorCount,
time: notifyData.checkInterval,
},
};
//console.log(emailSetup);
const sentEmail = await sendEmail(emailSetup);
if (!sentEmail.success) {
createLog(
"error",
"notify",
"notify",
"Failed to send email, will try again on next interval",
);
return {
success: false,
message: "Failed to send email, will try again on next interval",
data: sentEmail,
};
}
}
} catch (err) {
console.log(err);
createLog(
"error",
"notify",
"notify",
`Error from running the downtimeCheck query: ${err}`,
);
return {
success: false,
message: "Error running error data",
data: err,
};
}
return {
success: true,
message: "Error log checking ran",
data: errorLogData ?? [],
};
}

Some files were not shown because too many files have changed in this diff.