Compare commits

..

130 Commits

Author SHA1 Message Date
dcfa56bdb9 fix(notify): fixed plant to plant issue that would cause multiple emails to be sent and never update 2026-03-11 15:30:53 -05:00
ea92422bb1 feat(notification): plant to plant edi 2026-03-10 08:18:29 -05:00
2111a5fdc9 refactor(sql): changes to improve the job disable 2026-02-18 08:53:19 -06:00
6edd20585f refactor(stats): added in mastermacro version 2026-02-18 08:52:29 -06:00
a9759795c4 fix(scripts): changed the fake scanning to be more readable 2026-02-16 19:15:32 -06:00
32f26a1725 fix(quality): hoping to finally find the bug that kills me every night 2026-02-16 19:05:41 -06:00
60533beed5 feat(sql): job disabling scripts 2026-02-16 19:05:08 -06:00
24ced97b6d feat(notification): added cycle count check 2026-02-16 19:04:38 -06:00
dc1d342799 fix(scanner): if host or port not sent over stop the connection right away 2026-02-16 19:04:04 -06:00
44d0cb63cf refactor(sql): moved new queries to there own folder to make it more easy to work and migrate 2026-02-16 19:01:23 -06:00
ace73fa919 refactor(sendmail): updated the smtp per alpla needs 2026-02-16 18:59:12 -06:00
316af4233f refactor(stats): added sheet version check in 2026-02-16 18:58:17 -06:00
36a805c652 refactor(scripts): create finance bol 2026-02-16 09:40:19 -06:00
460bc3d24a feat(query selector): queryselector from file based vs cp to ts filesz 2026-02-16 09:40:00 -06:00
ec201fcfb5 refactor(sql): full changes to localhost if on produciton server 2026-02-16 09:39:35 -06:00
914ad46c43 refactor(sql server): changes to look at localhost if in production 2026-02-16 09:38:55 -06:00
b96c546ed3 refactor(notify): changed to only allow max 100 errors in the email 2026-02-16 09:38:31 -06:00
29b3be41a1 build(notification): fixed fifo index ts errors 2026-02-16 09:38:10 -06:00
16edf58025 refactor(eom): changes to hist inv 2026-02-16 09:37:40 -06:00
775627f215 feat(scanner): tcp scanner connection based on env var no more db stuff 2026-02-16 09:37:14 -06:00
4e70fae69b refactor(api docs): added/changed docs 2026-02-16 09:36:44 -06:00
24dd109a21 fix(commandlog): changes to the log table 2026-02-13 16:08:23 -06:00
38b57a00cc refactor(datamart): article changes to add pet-g 2026-02-13 16:03:26 -06:00
f8070db95f fix(sqlserver): changed to proper pool connection 2026-02-13 16:02:43 -06:00
10e9dc430c fix(notification): limited to 1000 max errors 2026-02-13 15:59:38 -06:00
6b669ccd9c fix(labelinfo): corrected the query on label info for external 2026-02-13 14:51:15 -06:00
d9a10d98a1 refactor(sendmail): change the send mail function from noreply to donotreply 2026-02-13 14:50:44 -06:00
e64dc7c013 refactor(ocp): removed zechetti 2 from this silly thing for now 2026-02-13 14:50:07 -06:00
d63138d746 helper scripts 2026-02-03 15:40:51 -06:00
84a28f2d01 added relocate 2026-02-03 15:40:41 -06:00
9be6614972 fix(ocp): more material check work 2026-01-19 07:50:27 -06:00
9d0db71f6a fix(datamart): psiPlanning was looking at theoretical, but in a rare case this would look at last year's data
and be missed and not pull correct data; switched to plan end
2026-01-15 13:28:18 -06:00
3cc55436f3 refactor(psi): old planning numbers to revert back to maybe 2026-01-08 20:11:40 -06:00
124fde07e0 refactor(psi): planning numbers refactored to deal with a bad downsync that caused negative numbers 2026-01-08 20:08:31 -06:00
b15d0d7322 refactor(datamart): delivery by date range updates 2026-01-08 20:08:03 -06:00
0680f332fb refactor(manual print): added new option for mulitple tags on pallet 2026-01-08 20:07:28 -06:00
46bf310dce refactor(datamart): changed the getDelbyDateRange to the new 2.0 way 2026-01-05 15:17:15 -06:00
0dda6ae744 fix(notifications): fixed a type in the alert being sent out only showing 1 alert instead of many 2026-01-05 10:27:09 -06:00
1b59cdd3a4 fix(psi): corrections to account for the time offset in the psi 2026-01-02 11:07:45 -06:00
56934216f7 fix(sql): fix connection issues in the sql connection loosing it 2025-12-31 07:54:26 -06:00
e8a2ef8b85 refactor(ocp): plc reading changes to disconnect and reconnect
it was found that there were some errors that spammed the log and caused the server to actually stop
responding and crash weirdly so added a disconnect and reconnect back. so we can figure out whats
going on.
2025-12-30 10:55:28 -06:00
6cbffa4ac5 feat(notification): error monitoring
if there are more than 10 errors in a 15min window sends email to alert someone
2025-12-30 10:54:09 -06:00
09f16f4e62 refactor(datamart): added in 2 new queroes 2025-12-30 08:43:03 -06:00
461acb2b16 fix(datamart): removed limitation on inhousedelivery 2025-12-30 07:21:40 -06:00
0d05c66a2b fix(quality): changes to the logging to reduce un nessasary logging 2025-12-30 07:13:20 -06:00
096cc18477 refactor(datamart): psi work 2025-12-30 07:12:38 -06:00
f3333ce020 fix(dm): abbott truck time corrections 2025-12-18 08:47:12 -06:00
8e3d2b3d95 fix(dm): new scj custom mapping added for westbend 2025-12-12 10:53:33 -06:00
501709546d feat(dm): abbott trucklist will do orders and forecast now 2025-12-11 15:56:34 -06:00
2b5e77993b test(dm): starts on abbot forecast to be brought over from the original truck list 2025-12-11 14:43:50 -06:00
6efaffbb17 fix(printers): add null case for the printerID 2025-12-11 14:43:22 -06:00
90ddbca2e7 fix(rfid): changes to log all reads and also replace some trailing text from a hex 2025-12-11 14:42:51 -06:00
7a9ea16f48 fix(dm): energizer forecast to correct the date and qty push over 2025-12-11 14:42:22 -06:00
420826de9b fix(frontend): transfer lots style adjustment 2025-12-11 14:41:48 -06:00
dc2d3718fa fix(dm): energizer orders missing remark 2025-12-11 14:41:22 -06:00
5013228384 feat(inhouse): delivery by pallet and lot added 2025-12-08 13:16:23 -06:00
4459742cf0 feat(datamart): inhouse delivery added 2025-12-08 13:15:59 -06:00
070c3ee975 fix(mainmaterial): added missing return 2025-12-08 13:15:37 -06:00
8ac92888ad test(mobile): keyence added 2025-12-08 08:37:25 -06:00
567579ef35 fix(transfers): corrected to be a int vs float 2025-12-08 08:37:07 -06:00
8d90f27514 fix(rfid): corrected the link to reset 2025-12-08 08:36:47 -06:00
722b23a321 fix(rfid): changed the tag reading to have a little more flexable 2025-12-08 08:36:25 -06:00
ba48c5307f style(transfer to next lot): spelling issue 2025-12-08 08:35:22 -06:00
30d2ec0477 fix(materials): if no machine stop the check and fail so it manual needs tried again 2025-12-08 08:34:52 -06:00
d3c6444491 chore(release): 1.9.0 2025-12-02 18:30:54 -06:00
12345c0b64 fix(commands): corrections to allow external labels to be consumed and transfered 2025-12-02 18:29:30 -06:00
6833dfc992 refactor(quality): added a check to monior #7 as well 2025-12-02 15:25:05 -06:00
ac27a286c0 fix(lot transfer): changes to make it so the reprint and return do not happen instantly 2025-12-02 15:24:10 -06:00
a3dba6cc9d refactor(helpercommands): removed the remove as reusabele 2025-12-02 15:22:02 -06:00
320dd47aea refactor(sql): some changes to help with sql connection on random disconnect 2025-12-02 15:21:34 -06:00
712a6eebdf fix(lstv2): added in a close function to stop crashing the server 2025-11-30 10:31:27 -06:00
f226c5644c fix(dm): type in customer article number 2025-11-30 10:30:54 -06:00
d605225e48 refactor(swagger): corrected the name displaced 2025-11-26 08:53:05 -06:00
8e7f1eb098 feat(datamart): active article moved over to the new version 2025-11-26 08:52:28 -06:00
59c6fd0117 fix(labeling): added in a catch to avoid rouge lots 2025-11-26 08:29:33 -06:00
2607fd3026 feat(swagger): added in the start of swagger where all the common and useable endpoints will be 2025-11-25 17:02:20 -06:00
bdb4bfc53d chore(module updates): just updated all the modules 2025-11-25 16:12:12 -06:00
c1816c07ff refactor(contorller): only install npm production modules dont install everything 2025-11-25 16:11:41 -06:00
7311372ba8 feat(dm): added article description into the historical data 2025-11-25 14:58:47 -06:00
cd53460bec chore(release): 1.8.0 2025-11-25 14:36:45 -06:00
7e15e5d7bc feat(settings): final migration of settings and edits added 2025-11-25 14:36:06 -06:00
3193e07e47 fix(prodendpoint): if we have a real error just report it dont actually crash 2025-11-24 15:22:47 -06:00
40bc19aa6f feat(migration): settings migration from old app all is now in the new app 2025-11-24 15:22:12 -06:00
90920e8fba refactor(app): refactored how we have the pkg.json so we can reduce on size of the app 2025-11-24 15:21:17 -06:00
f8cf0851a8 test(ti intergration): added the place holder for intergration of comments for customer 2025-11-24 15:20:43 -06:00
efdab5bafc test(controller): added in server port that might night be needed but oh well 2025-11-24 15:19:38 -06:00
700346d809 refactor(ocp): work around for zechetti 2 until we can monitor more closing
currently when 2 pallets from the same line come over at the same time the second one dose not print
when running off lst.
2025-11-23 10:55:32 -06:00
a96b85bc53 feat(dm): new endpoint to get the forecast data 2025-11-21 15:37:22 -06:00
b23bb0db31 feat(form stuff): added in a searchable dropdown and added to new forklifts 2025-11-20 20:21:43 -06:00
8c0f67ca35 refactor(forklifts): more refactoring to improve during production 2025-11-20 19:47:52 -06:00
7b6c9bdfbf feat(forklifts): added the ability to add new forklifts in 2025-11-20 17:51:06 -06:00
7b28f4e9ef refactor(quality): more changes to the system to perfect it 2025-11-20 15:08:29 -06:00
a30eebf5d3 test(materials per day): work on getting this running better 2025-11-19 18:42:58 -06:00
9aa0b31278 fix(silo adjustments): added supervisor to see the actual page so it matches the sidbard 2025-11-19 18:42:37 -06:00
33cbb17a0e refactor(cards): trying to make the cards fit the odd shaped screens better 2025-11-19 18:42:13 -06:00
242ff6277a refactor(quality): added some new options plus cancel button 2025-11-19 18:41:44 -06:00
566754bf2e feat(quality): added in comments 2025-11-17 18:01:24 -06:00
50b7c9cac5 fix(label ratio): correction to the endpoint 2025-11-17 18:01:05 -06:00
b0ac326752 feat(scroll view): added in a scroll view to for quality so it dose not go over the end of the page 2025-11-17 17:21:53 -06:00
9572b71592 feat(quality): added location moved to to the table 2025-11-17 17:07:41 -06:00
96c3e4c24a fix(quality): request was missing the forced selection 2025-11-17 17:07:23 -06:00
aba1668d2c fix(register): added the ability to put in _ for user name 2025-11-17 17:00:26 -06:00
20fc286069 test(mobile): stallion intergration starts 2025-11-16 20:21:08 -06:00
75c0659658 test(mobile): more testing on ota 2025-11-16 20:11:34 -06:00
564f0b5add feat(materials per day): more work on materials per day 2025-11-15 16:22:52 -06:00
7b630d5c0b test(android app): this is the start to the android app 2025-11-15 16:22:19 -06:00
eb6b9ce388 fix(quality): corrected url and perms for quality link 2025-11-14 11:50:28 -06:00
c777395b03 feat(quality): priority ranking added 2025-11-14 11:49:57 -06:00
c509c7fe28 feat(notify): material per day for the next 90 days 2025-11-12 20:22:53 -06:00
6f632ecd68 fix(quality request): bug fixes
lots of bug fixes plug 2 new counters, quality inspect time and warehouse return time
2025-11-12 20:22:21 -06:00
fef0303cd6 fix(dm): fixes to validate auth before submiting incase someone stays on the page to long 2025-11-12 20:21:14 -06:00
314ab049bb test(mobile): testing for ota updated on android scanner 2025-11-12 20:20:44 -06:00
5277ddfc51 ci(settings): changes to vs code as it was being way to slow 2025-11-12 20:20:08 -06:00
b6030de4f4 feat(mobile): ota updates added 2025-11-10 20:19:19 -06:00
1084cede04 refactor(inv with rn): now includes batch number for tetra 2025-11-10 17:22:48 -06:00
ca866bf8c6 fix(uom): correction to how we do the uom check so we dont just look for the space 2025-11-10 15:14:56 -06:00
9d793d2205 fix(historical date): added so we can have all dates 2025-11-10 15:14:19 -06:00
247010d48f fix(plc zeccetti): changes to improve the timing on the zecetti more to be done 2025-11-07 10:08:56 -06:00
7c40f028c8 fix(historical inv): corrected the way the date can come over to allow for yyyy-mm-dd or with /
the date was coming over in utc format somnetimes and others times local.

close #1 correction to the date formats
2025-11-07 10:08:12 -06:00
65304f61ce feat(invoice form): added new invoice form 2025-11-05 21:58:28 -06:00
6ce4d84fd0 refactor(leases): removed main server until i have a better way to sync them 2025-11-04 22:12:41 -06:00
2e05f6eeee feat(invoices): added invoice + linking to forklift 2025-11-04 22:12:08 -06:00
577584ef4d feat(forklifts): added the crud 2025-11-04 22:11:16 -06:00
360c0163f1 fix(preprint): added the correct to string for the preprint stuff 2025-11-04 20:16:59 -06:00
bd7bea8db6 feat(leases): added in leases and move table to reuseable component 2025-11-04 20:16:14 -06:00
b1c56ee4bb fix(correction): the name needed to be correct to have a proper tempalte 2025-11-04 11:27:22 -06:00
79f4121311 feat(templates): added bug repot template 2025-11-04 11:22:49 -06:00
d17edb1f9c feat(missing inv): adding a way to check for missing data in case it dose pull on the correct days 2025-11-03 18:01:58 -06:00
8fca201e04 fix(nav): added missing add card button 2025-11-03 18:01:15 -06:00
b4064e8769 feat(barcode gen): added the missing link 2025-11-03 18:00:50 -06:00
317 changed files with 69607 additions and 7047 deletions

33
.gitea/ISSUE_TEMPLATE.md Normal file
View File

@@ -0,0 +1,33 @@
---
name: Bug Report
about: Report a bug to help us improve.
title: "[BUG] "
labels: ["bug", "needs-triage"]
---
### Describe the bug
A clear and concise description of what the bug is.
### Steps to reproduce
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
### Expected behavior
A clear and concise description of what you expected to happen.
### Screenshots
If applicable, add screenshots to help explain your problem.
### Gitea Version
e.g., 1.25.0
### Additional context
Add any other context about the problem here.

1
.gitignore vendored
View File

@@ -13,6 +13,7 @@ controllerBuilds
# ignoring the old app that will be built into this one to make deploying faster and more easy as we do the migration
lstV2/frontend/.tanstack
keys
# Logs
logs
*.log

View File

@@ -1,5 +1,6 @@
dist
frontend/dist
mobileLst/dist
lstDocs/build
migrations
Dockerfile

View File

@@ -1,48 +0,0 @@
{
"editor.defaultFormatter": "esbenp.prettier-vscode",
"workbench.colorTheme": "Default Dark+",
"prettier.tabWidth": 4,
"terminal.integrated.env.windows": {},
"editor.formatOnSave": true,
"[javascript]": {
"editor.formatOnSave": true
},
"[javascriptreact]": {
"editor.formatOnSave": true
},
"[typescript]": {
"editor.formatOnSave": true
},
"[typescriptreact]": {
"editor.formatOnSave": true
},
"[json]": {
"editor.formatOnSave": true
},
"[graphql]": {
"editor.formatOnSave": true
},
"[handlebars]": {
"editor.formatOnSave": true
},
"[go]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "golang.go"
},
"[powershell]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "ms-vscode.powershell" // requires PowerShell extension
},
"[bat]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format" // supports .sh, .bat, .cmd
},
"[cmd]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format"
},
// Optional: Configure goimports instead of gofmt
"go.formatTool": "goimports",
"cSpell.words": ["alpla", "alplamart", "alplaprod", "ppoo"]
}

51
.vscode/settings.json vendored
View File

@@ -7,13 +7,60 @@
"source.fixAll.biome": "explicit",
"source.organizeImports.biome": "explicit"
},
"[javascript]": {
"editor.formatOnSave": true
},
"[javascriptreact]": {
"editor.formatOnSave": true
},
"[typescript]": {
"editor.formatOnSave": true
},
"[typescriptreact]": {
"editor.formatOnSave": true
},
"[json]": {
"editor.formatOnSave": true
},
"[graphql]": {
"editor.formatOnSave": true
},
"[handlebars]": {
"editor.formatOnSave": true
},
"[go]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "golang.go"
},
"[powershell]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "ms-vscode.powershell" // requires PowerShell extension
},
"[bat]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format" // supports .sh, .bat, .cmd
},
"[cmd]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "foxundermoon.shell-format"
},
// Optional: Configure goimports instead of gofmt
"go.formatTool": "goimports",
"cSpell.words": [
"acitve",
"actaully",
"alpla",
"alplamart",
"alplaprod",
"autoconsume",
"intiallally",
"ppoo",
"prodlabels"
]
"prodlabels",
"rfid"
],
"gitea.token": "8456def90e1c651a761a8711763d6ef225d6b2db",
"gitea.instanceURL": "https://git.tuffraid.net",
"gitea.owner": "cowch",
"gitea.repo": "lst"
}

View File

@@ -1,5 +1,138 @@
# All Changes to LST can be found below.
## [1.9.0](https://git.tuffraid.net/cowch/lst/compare/v1.8.0...v1.9.0) (2025-12-03)
### 📝 Chore
* **module updates:** just updated all the modules ([bdb4bfc](https://git.tuffraid.net/cowch/lst/commits/bdb4bfc53d24f37f0e7098ea828cf418d58d5224))
### 🌟 Enhancements
* **datamart:** active article moved over to the new version ([8e7f1eb](https://git.tuffraid.net/cowch/lst/commits/8e7f1eb09811fcf3ea49b95b0ba9a8f55b9c4184))
* **dm:** added article description into the historical data ([7311372](https://git.tuffraid.net/cowch/lst/commits/7311372ba8eb901b51972ca216152bcfc2b009af))
* **swagger:** added in the start of swagger where all the common and useable endpoints will be ([2607fd3](https://git.tuffraid.net/cowch/lst/commits/2607fd3026ed0b5777a5598aa3498ffc67baa012))
### 🛠️ Code Refactor
* **contorller:** only install npm production modules dont install everything ([c1816c0](https://git.tuffraid.net/cowch/lst/commits/c1816c07ff5ac939b0997d314a9da624a4a66b7a))
* **helpercommands:** removed the remove as reusabele ([a3dba6c](https://git.tuffraid.net/cowch/lst/commits/a3dba6cc9db147ff4765fef648867e50878a6ac8))
* **quality:** added a check to monior [#7](https://git.tuffraid.net/cowch/lst/issues/7) as well ([6833dfc](https://git.tuffraid.net/cowch/lst/commits/6833dfc9929741203083b01726b83a6c8d61d308))
* **sql:** some changes to help with sql connection on random disconnect ([320dd47](https://git.tuffraid.net/cowch/lst/commits/320dd47aea017b4ff219b07e363ef87ec8523b82))
* **swagger:** corrected the name displaced ([d605225](https://git.tuffraid.net/cowch/lst/commits/d605225e48bca66f915ce0db448aa61933891986))
### 🐛 Bug fixes
* **commands:** corrections to allow external labels to be consumed and transfered ([12345c0](https://git.tuffraid.net/cowch/lst/commits/12345c0b6442c3abd309f660bb43216def9abb89))
* **dm:** type in customer article number ([f226c56](https://git.tuffraid.net/cowch/lst/commits/f226c5644cc2b93b9d967962bd6f82b3e506c8c0))
* **labeling:** added in a catch to avoid rouge lots ([59c6fd0](https://git.tuffraid.net/cowch/lst/commits/59c6fd011728dff50bfa3233d6095c396d0b1999))
* **lot transfer:** changes to make it so the reprint and return do not happen instantly ([ac27a28](https://git.tuffraid.net/cowch/lst/commits/ac27a286c07733333703d8421cfa525691363e54))
* **lstv2:** added in a close function to stop crashing the server ([712a6ee](https://git.tuffraid.net/cowch/lst/commits/712a6eebdfef0ce2b99155d23422ddc7e5e0daad))
## [1.8.0](https://git.tuffraid.net/cowch/lst/compare/v1.7.0...v1.8.0) (2025-11-25)
### 📝 Chore
* **misc:** api doc dates and db changes ([52089ec](https://git.tuffraid.net/cowch/lst/commits/52089ecdf06d9e5983afb6a24953d066a0456949))
### 📈 Project changes
* **settings:** changes to vs code as it was being way to slow ([5277ddf](https://git.tuffraid.net/cowch/lst/commits/5277ddfc51632651059e35b799ddf2a63d70cb70))
### 📝 Testing Code
* **android app:** this is the start to the android app ([7b630d5](https://git.tuffraid.net/cowch/lst/commits/7b630d5c0b1175f06c866b1238754e455d3a27c9))
* **controller:** added in server port that might night be needed but oh well ([efdab5b](https://git.tuffraid.net/cowch/lst/commits/efdab5bafca2a46d73de7ab1d776d283d5294aa3))
* **materials per day:** work on getting this running better ([a30eebf](https://git.tuffraid.net/cowch/lst/commits/a30eebf5d34c77c6c7118faf01776651f8888547))
* **mobile:** more testing on ota ([75c0659](https://git.tuffraid.net/cowch/lst/commits/75c0659658cbbb7e983df5538538d6429a325379))
* **mobile:** stallion intergration starts ([20fc286](https://git.tuffraid.net/cowch/lst/commits/20fc2860690f59851ca1e0251df84854c9d2ba01))
* **mobile:** testing for ota updated on android scanner ([314ab04](https://git.tuffraid.net/cowch/lst/commits/314ab049bb650120489259e920e52fd530f0ce41))
* **ti intergration:** added the place holder for intergration of comments for customer ([f8cf085](https://git.tuffraid.net/cowch/lst/commits/f8cf0851a86a6923a8941774efe17f93cb92e984))
### 🛠️ Code Refactor
* **all server stats:** added a 5 second time out if it dose not reach it just stops ([2133b94](https://git.tuffraid.net/cowch/lst/commits/2133b94a1dd84e16bb5a4b8fe215739a4b355223))
* **app:** refactored how we have the pkg.json so we can reduce on size of the app ([90920e8](https://git.tuffraid.net/cowch/lst/commits/90920e8fba4757297e0e42e86f80d5f14434a48e))
* **base modules:** removed the log spam ([6d27a7a](https://git.tuffraid.net/cowch/lst/commits/6d27a7aa6395a094d8763ba3fde0bdb81a7e3082))
* **cards:** trying to make the cards fit the odd shaped screens better ([33cbb17](https://git.tuffraid.net/cowch/lst/commits/33cbb17a0ede136a9be96e47ba0a7a66468b1ebc))
* **forklifts:** more refactoring to improve during production ([8c0f67c](https://git.tuffraid.net/cowch/lst/commits/8c0f67ca351778405279f7e225ee8dae654033f9))
* **inv with rn:** now includes batch number for tetra ([1084ced](https://git.tuffraid.net/cowch/lst/commits/1084cede04d43ec2b2c22c43c6e701bad4701981))
* **leases:** removed main server until i have a better way to sync them ([6ce4d84](https://git.tuffraid.net/cowch/lst/commits/6ce4d84fd00fa446ccb7d1bbad28680f045fae52))
* **ocp page:** using the name of the url now vs the settings ([d406a92](https://git.tuffraid.net/cowch/lst/commits/d406a92f3d5d6a8902164e9182717912debae804))
* **ocp:** work around for zechetti 2 until we can monitor more closing ([700346d](https://git.tuffraid.net/cowch/lst/commits/700346d80972e464d0a9ba62bba4dc0ed949cdee))
* **quality:** added some new options plus cancel button ([242ff62](https://git.tuffraid.net/cowch/lst/commits/242ff6277a1f407fbed2951d30dd6cf1ee32dd60))
* **quality:** more changes to the system to perfect it ([7b28f4e](https://git.tuffraid.net/cowch/lst/commits/7b28f4e9ef32a4fc90a0b4b16953b6cead096cac))
* **serverlist:** refactored to also show uptime and other info about the server ([e1e659f](https://git.tuffraid.net/cowch/lst/commits/e1e659f9b14f22474f919350f07b02b45141aa63))
* **types:** moved the item type to the sidebar to keep it more clean ([5023d4d](https://git.tuffraid.net/cowch/lst/commits/5023d4d129737cf6e0609592e5606a20a0f3728b))
* **wrapper:** removed the logs so its not spamming the server ([b8a9aa5](https://git.tuffraid.net/cowch/lst/commits/b8a9aa5132c7606fcccae8f058a77a11a8ed552a))
### 🐛 Bug fixes
* **comments:** added new role to put comments in ([1283a63](https://git.tuffraid.net/cowch/lst/commits/1283a63b5fd71fb44f7ec7789f670f8af7eafbb8))
* **correction:** the name needed to be correct to have a proper tempalte ([b1c56ee](https://git.tuffraid.net/cowch/lst/commits/b1c56ee4bb32c0dbf86e0164614fb3f1ecaf262d))
* **dm:** correction to passing the username over for the importing of the file ([a7a9aa2](https://git.tuffraid.net/cowch/lst/commits/a7a9aa2874ddd1391b56983db51cfabd8e789213))
* **dm:** fixes to validate auth before submiting incase someone stays on the page to long ([fef0303](https://git.tuffraid.net/cowch/lst/commits/fef0303cd6fdc9cc8cf9f9f4ad674a8b725691f3))
* **file name wrong:** fixed the name of the file for getCompanies ([8996da7](https://git.tuffraid.net/cowch/lst/commits/8996da7eb46a8b5bdfe44ee74a676e701d64fdbf))
* **forecast table:** correction to the customer article number ([ebe5c0b](https://git.tuffraid.net/cowch/lst/commits/ebe5c0bd5a883b1cbe87f7f9932fd803e80e7fae))
* **historical date:** added so we can have all dates ([9d793d2](https://git.tuffraid.net/cowch/lst/commits/9d793d22051c585ed224bfaf16e2a9b60bb02635))
* **historical inv:** corrected the way the date can come over to allow for yyyy-mm-dd or with / ([7c40f02](https://git.tuffraid.net/cowch/lst/commits/7c40f028c88d7fd78ac8ab75c172d808783fc641)), closes [#1](https://git.tuffraid.net/cowch/lst/issues/1)
* **historicalinv:** removed the second running one that caused duplicates ([a6cc17c](https://git.tuffraid.net/cowch/lst/commits/a6cc17ccb12b0d99ffdb1d371c5daf3bbb91f7ba))
* **label ratio:** correction to the endpoint ([50b7c9c](https://git.tuffraid.net/cowch/lst/commits/50b7c9cac5cd6923b08a8705fc8cb41530ec5b02))
* **manual print:** fixed so the print disables ([d3e8e94](https://git.tuffraid.net/cowch/lst/commits/d3e8e941103dc0118066e8790e7c27e5f035a6c5))
* **nav:** added missing add card button ([8fca201](https://git.tuffraid.net/cowch/lst/commits/8fca201e0463aba7ecace61f8dfb737e2acf4140))
* **ocp:** made corrections to the ocp page in dayton ([92af726](https://git.tuffraid.net/cowch/lst/commits/92af7262f60514501b903f5307d34e9154cc9034))
* **plc zeccetti:** changes to improve the timing on the zecetti more to be done ([247010d](https://git.tuffraid.net/cowch/lst/commits/247010d48f10ebb02a1b98c5df101134e8dab250))
* **preprint:** added the correct to string for the preprint stuff ([360c016](https://git.tuffraid.net/cowch/lst/commits/360c0163f1d3135d9c1c3788ac53dc8e0757c441))
* **prodendpoint:** if we have a real error just report it dont actually crash ([3193e07](https://git.tuffraid.net/cowch/lst/commits/3193e07e4707d055517b15f77ac117fefe07de12))
* **quality request:** bug fixes ([6f632ec](https://git.tuffraid.net/cowch/lst/commits/6f632ecd6831456c6e3c9973bc0ce7feb229aeec))
* **quality:** corrected url and perms for quality link ([eb6b9ce](https://git.tuffraid.net/cowch/lst/commits/eb6b9ce388c5dea35f95a9403765e7d330b664f9))
* **quality:** request was missing the forced selection ([96c3e4c](https://git.tuffraid.net/cowch/lst/commits/96c3e4c24adbdc59d11f7ea43888e1c47d061f90))
* **register:** added the ability to put in _ for user name ([aba1668](https://git.tuffraid.net/cowch/lst/commits/aba1668d2cab63a031657fb7c9f2bfb9777fa72a))
* **servers:** changed the server name to be unique ([a7bde5e](https://git.tuffraid.net/cowch/lst/commits/a7bde5e4eb41c597f94302dd2d119f7048c18a6f))
* **silo adjustments:** added supervisor to see the actual page so it matches the sidbard ([9aa0b31](https://git.tuffraid.net/cowch/lst/commits/9aa0b31278e5f8201acd21774f19ba69709a654d))
* **silo commits:** added in email that was now missing due to new authj ([25a958d](https://git.tuffraid.net/cowch/lst/commits/25a958d592d189f896ae0b5f7608d80a6ee2b1e7))
* **uom:** correction to how we do the uom check so we dont just look for the space ([ca866bf](https://git.tuffraid.net/cowch/lst/commits/ca866bf8c63e0576e890367d24a47c7ab46cc864))
### 🌟 Enhancements
* **added in swagger:** added the base for swagger to implement fully later ([9d9ca63](https://git.tuffraid.net/cowch/lst/commits/9d9ca63d7c9ab3e3ea168cf2add9c7baf2b9ed15))
* **articles:** moved articles over to the main server ([2a6eafa](https://git.tuffraid.net/cowch/lst/commits/2a6eafa19a97f0be01f63c68b63b4abfc4de1409))
* **barcode gen:** added the missing link ([b4064e8](https://git.tuffraid.net/cowch/lst/commits/b4064e87691937ad9f99441767b556a167b91055))
* **db manual fixes:** added a way to fix manual db changes as needed ([0b02984](https://git.tuffraid.net/cowch/lst/commits/0b0298423ed75eed6d112a04dda998b8a23b20ea))
* **dm:** new endpoint to get the forecast data ([a96b85b](https://git.tuffraid.net/cowch/lst/commits/a96b85bc536809d223dd7a29150d1a4d632e80da))
* **forecast data:** added in a historical forecast data set ([c2ae445](https://git.tuffraid.net/cowch/lst/commits/c2ae445ea4d26b047a2ee5d16041ed230f7b2061))
* **forklifts:** added backend forklift stuff and frontend companies ([50cde2d](https://git.tuffraid.net/cowch/lst/commits/50cde2d8d2aa24796db1f1c0126ef8c373614d5d))
* **forklifts:** added the ability to add new forklifts in ([7b6c9bd](https://git.tuffraid.net/cowch/lst/commits/7b6c9bdfbf2cf9d97c8e23d8ebd6523e32284963))
* **forklifts:** added the crud ([577584e](https://git.tuffraid.net/cowch/lst/commits/577584ef4dd10ee7f57ab0ad0d6261adddaf8966))
* **form stuff:** added in a searchable dropdown and added to new forklifts ([b23bb0d](https://git.tuffraid.net/cowch/lst/commits/b23bb0db31f78f46ffc556577cadb62e0bfa3b83))
* **invoice form:** added new invoice form ([65304f6](https://git.tuffraid.net/cowch/lst/commits/65304f61ceb3ad4655757aa5c291ac4ed77db048))
* **invoices:** added invoice + linking to forklift ([2e05f6e](https://git.tuffraid.net/cowch/lst/commits/2e05f6eeee052a92095098c73ace0bd331c43b22))
* **leases:** added in leases and move table to reuseable component ([bd7bea8](https://git.tuffraid.net/cowch/lst/commits/bd7bea8db697f5b025b8d93f86677a9a69cdf2b4))
* **listeners:** added in a new feature to auto add new listeners ([f9cfada](https://git.tuffraid.net/cowch/lst/commits/f9cfada8409b3a88323dafa80730c5565c067da8))
* **materials per day:** more work on materials per day ([564f0b5](https://git.tuffraid.net/cowch/lst/commits/564f0b5addd109018a806edd6a1fed4399ea63aa))
* **migration:** settings migration from old app all is now in the new app ([40bc19a](https://git.tuffraid.net/cowch/lst/commits/40bc19aa6f952a7a60b5ee8281fa159ca114161f))
* **missing inv:** adding a way to check for missing data in case it dose pull on the correct days ([d17edb1](https://git.tuffraid.net/cowch/lst/commits/d17edb1f9c830a2c17d28bd9180d264607d66fa2))
* **mobile:** ota updates added ([b6030de](https://git.tuffraid.net/cowch/lst/commits/b6030de4f44e73ce8bb9152886d384b9d7f2edff))
* **notify:** material per day for the next 90 days ([c509c7f](https://git.tuffraid.net/cowch/lst/commits/c509c7fe286a43ab0ffbf86635631477237632b5))
* **quality:** added in comments ([566754b](https://git.tuffraid.net/cowch/lst/commits/566754bf2ecfc390bc927b48aadb2fa934353769))
* **quality:** added location moved to to the table ([9572b71](https://git.tuffraid.net/cowch/lst/commits/9572b7159235c18617ff46058c94dfd9cfab8abc))
* **quality:** priority ranking added ([c777395](https://git.tuffraid.net/cowch/lst/commits/c777395b0350f60bd457c3164ed1ae478249df3a))
* **scroll view:** added in a scroll view to for quality so it dose not go over the end of the page ([b0ac326](https://git.tuffraid.net/cowch/lst/commits/b0ac326752331ab01ad981fa7b1022e82beab143))
* **servers:** added a link to the server by clicking on the name and the gp code ([00ef72d](https://git.tuffraid.net/cowch/lst/commits/00ef72de90e43c12bd3fecdc08dfa1e3a4f881fb))
* **settings:** added in dyco printing settings ([2ed6bf4](https://git.tuffraid.net/cowch/lst/commits/2ed6bf4d1f32f9a92712ccb36d4a4146ca112e85))
* **settings:** final migration of settings and edits added ([7e15e5d](https://git.tuffraid.net/cowch/lst/commits/7e15e5d7bcdf58f31bd96564be1f213d01d37cda))
* **start of server:** added the start of server data ([d60c08a](https://git.tuffraid.net/cowch/lst/commits/d60c08a281cd63f2183381a1a19c5e196b41fbc5))
* **templates:** added bug repot template ([79f4121](https://git.tuffraid.net/cowch/lst/commits/79f4121311df733f5dc59b32a6b32c1b4a32f97b))
## [1.7.0](https://git.tuffraid.net/cowch/lst/compare/v1.6.0...v1.7.0) (2025-10-30)

View File

@@ -0,0 +1,20 @@
meta {
name: Get Inv
type: http
seq: 2
}
get {
url: {{url}}/lst/old/api/eom/histinv?month=2025/11/1
body: none
auth: inherit
}
params:query {
month: 2025/11/1
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,16 @@
meta {
name: Error logging
type: http
seq: 4
}
get {
url: {{urlv2}}/api/notify/toomanyerrors
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,16 @@
meta {
name: materialPerDay
type: http
seq: 2
}
get {
url: {{urlv2}}/api/notify/materialperday
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,16 @@
meta {
name: ti Integration
type: http
seq: 3
}
get {
url: {{urlv2}}/api/notify/tiTrigger
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,25 @@
meta {
name: Add pallet
type: http
seq: 2
}
post {
url: {{url}}/lst/old/api/quality/newrequest
body: json
auth: inherit
}
body:json {
{
"username": "matthes01",
"runningNr": 618302,
"palletStatusText":"return" // returned will be the only allowed key
//"moveTo": "hold area" //hold area, rework, inspection
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,16 @@
meta {
name: Get Pallets
type: http
seq: 1
}
get {
url: {{url}}/lst/old/api/quality/getrequest
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: Quality
seq: 7
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,22 @@
meta {
name: sscc
type: http
seq: 4
}
post {
url: {{url}}/lst/old/api/logistics/getsscc
body: json
auth: inherit
}
body:json {
{
"runningNr": ""
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,20 @@
meta {
name: PSI - Forecast data
type: http
seq: 1
}
get {
url: {{url}}/lst/old/api/datamart/psiforecastdata?customer=8
body: none
auth: inherit
}
params:query {
customer: 8
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,22 @@
meta {
name: PSI -planning data
type: http
seq: 2
}
get {
url: {{url}}/lst/old/api/datamart/psiplanningdata?avs=118,120&startDate=12/1/2025&endDate=12/31/2026
body: none
auth: inherit
}
params:query {
avs: 118,120
startDate: 12/1/2025
endDate: 12/31/2026
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: datamart
seq: 8
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,26 @@
meta {
name: Logs
type: http
seq: 2
}
get {
url: {{url}}/lst/old/api/logger/logs?service=ocp&service=rfid&service=dyco&level=error&level=info&level=warn&hours=12
body: none
auth: inherit
}
params:query {
service: ocp
service: rfid
service: dyco
level: error
level: info
level: warn
hours: 12
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,22 @@
meta {
name: Get Invoices
type: http
seq: 4
}
get {
url: {{url}}/lst/api/forklifts/invoices
body: none
auth: inherit
}
body:json {
{
"name":"Delage DLL"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,27 @@
meta {
name: Update lease
type: http
seq: 3
}
patch {
url: {{url}}/lst/api/forklifts/invoices/:id
body: json
auth: inherit
}
params:path {
id: de10c8ee-5756-4efb-9664-3c55338b2b60
}
body:json {
{
"companyId": "b34c6684-ec35-4364-acef-0c1570faf123"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,30 @@
meta {
name: add lease
type: http
seq: 1
}
post {
url: {{url}}/lst/api/forklifts/invoices
body: json
auth: inherit
}
body:json {
{
"leaseId": "0147d082-aee0-4594-b0f4-c6f4ee777e92",
"invoiceNumber": "592596987",
"invoiceDate": "10/12/2025",
"uploadedBy": "matthes01",
"totalAmount": "1820.88",
"forklifts": [
{ "forklift_Id": "ec2f3759-1580-4c1b-8fbf-8a4b0b506758", "amount": 909.91 }
]
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: Invoices
seq: 4
}
auth {
mode: inherit
}

View File

@@ -1,6 +1,6 @@
meta {
name: companies
seq: 1
seq: 2
}
auth {

View File

@@ -0,0 +1,22 @@
meta {
name: Get forklift
type: http
seq: 2
}
get {
url: {{url}}/lst/api/forklifts
body: none
auth: inherit
}
body:json {
{
"name":"Delage DLL"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,28 @@
meta {
name: Update forklift
type: http
seq: 3
}
patch {
url: {{url}}/lst/api/forklifts/:id
body: json
auth: inherit
}
params:path {
id: ec2f3759-1580-4c1b-8fbf-8a4b0b506758
}
body:json {
{
"glCode": 31
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,31 @@
meta {
name: add forklift
type: http
seq: 1
}
post {
url: {{url}}/lst/api/forklifts
body: json
auth: inherit
}
body:json {
{
"serialNumber":"FN682004",
"model": "EFG220",
"plant": "Iowa City ISBM",
"glCode": 31,
"profitCenter": 30,
"manufacturer":"Jungheinrich",
"manufacturerYear":"2022",
"engine":"electric",
"batteryType":"lead acid",
"leaseId":"0147d082-aee0-4594-b0f4-c6f4ee777e92"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: forklifts
seq: 3
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,26 @@
meta {
name: Get lease
type: http
seq: 2
}
get {
url: {{url}}/lst/api/forklifts/leases?companyId=b34c6684-ec35-4364-acef-0c1570faf123
body: none
auth: inherit
}
params:query {
companyId: b34c6684-ec35-4364-acef-0c1570faf123
}
body:json {
{
"name":"Delage DLL"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,27 @@
meta {
name: Update lease
type: http
seq: 3
}
patch {
url: {{url}}/lst/api/forklifts/leases/:id
body: json
auth: inherit
}
params:path {
id: de10c8ee-5756-4efb-9664-3c55338b2b60
}
body:json {
{
"companyId": "b34c6684-ec35-4364-acef-0c1570faf123"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,25 @@
meta {
name: add lease
type: http
seq: 1
}
post {
url: {{url}}/lst/api/forklifts/leases
body: json
auth: inherit
}
body:json {
{
"leaseNumber":"40829107-1",
"startDate": "11/08/2023",
"endDate": "11/12/2025",
"companyId": "59c4eaa3-55db-4348-a033-f2fcd91a91d1"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: lease
seq: 1
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,23 @@
meta {
name: Consume
type: http
seq: 1
}
post {
url: {{url}}/lst/old/api/logistics/consume
body: json
auth: inherit
}
body:json {
{
"lotNum":283559,
"runningNr":19302907
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: Consume
seq: 5
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,8 @@
meta {
name: demandmgt
seq: 4
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,16 @@
meta {
name: get forecast data
type: http
seq: 1
}
get {
url: {{url}}/lst/api/logistics/dm/forecastData
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -13,12 +13,12 @@ post {
body:json {
{
"scannerId": 999,
"lotNr": 3314,
"machineId": 22, // 457=22, 458=23
"printerId": 22, // 457=22, 458=23
"layoutId": 7,
"numberOfCopies": 0,
"qtyToPrint": 1
"lotNr": 26656,
"machineId": 5, // 457=22, 458=23
"printerId": 7, // 457=22, 458=23
"layoutId": 22,
"numberOfCopies": 1,
"qtyToPrint":0
}
}

View File

@@ -0,0 +1,16 @@
meta {
name: SSCC
type: http
seq: 2
}
get {
url: {{url}}/lst/api/logistics/getsscc
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,20 @@
meta {
name: Update Setting
type: http
seq: 4
}
post {
url: {{url}}/lst/api/system/settings/:token
body: none
auth: inherit
}
params:path {
token: test3
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -1,6 +1,6 @@
{
"version": "1",
"name": "LogisticsSupportTool_API_DOCS",
"name": "lstv2",
"type": "collection",
"ignore": [
"node_modules",

View File

@@ -1,7 +1,7 @@
vars {
url: http://localhost:5500
session_cookie:
urlv2: http://localhost:3000
urlv2: http://usbow1vms006:3000
jwtV2:
userID:
}

View File

@@ -0,0 +1,24 @@
meta {
name: bookout
type: http
seq: 2
}
post {
url: {{url}}/lst/old/api/logistics/bookout
body: json
auth: none
}
body:json {
{
"runningNr": "1865027",
"reason": "packer printed premature"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: logistics
seq: 7
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,24 @@
meta {
name: relocate
type: http
seq: 1
}
post {
url: {{url}}/lst/old/api/logistics/relocate
body: json
auth: inherit
}
body:json {
{
"runningNr": "56121541",
"laneID": "30006"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,24 @@
meta {
name: removeAsWaste
type: http
seq: 3
}
post {
url: {{url}}/lst/old/api/logistics/removeasreusable
body: json
auth: none
}
body:json {
{
"runningNr": "1865018",
"reason": "validating stockout"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: mobile
seq: 4
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,20 @@
meta {
name: getsession
type: http
seq: 1
}
get {
url: {{url}}/lst/api/user/me
body: none
auth: bearer
}
auth:bearer {
token: jpHHbLNGJRpUMvfrVOYmhbJL2Ux0arse
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,15 @@
meta {
name: ota system check
type: http
seq: 2
}
get {
url: {{url}}/lst/api/mobile
body: none
auth: inherit
}
settings {
encodeUrl: true
}

View File

@@ -0,0 +1,20 @@
meta {
name: otacheck
type: http
seq: 3
}
get {
url: http://10.193.0.56:4000/api/mobile/updates
body: none
auth: inherit
}
headers {
expo-runtime-version: 1.0.0
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,8 @@
meta {
name: v3endpoints
seq: 5
}
auth {
mode: inherit
}

View File

@@ -0,0 +1,16 @@
meta {
name: tester
type: http
seq: 1
}
post {
url: http://localhost:3000/lst/api/system/prodsql/start
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -4,15 +4,15 @@ import { toNodeHandler } from "better-auth/node";
import cors from "cors";
import express from "express";
import { createServer } from "http";
import { createProxyMiddleware, fixRequestBody } from "http-proxy-middleware";
import { createProxyMiddleware } from "http-proxy-middleware";
import morgan from "morgan";
import os from "os";
import { dirname, join } from "path";
import swaggerJsdoc from "swagger-jsdoc";
import swaggerUi from "swagger-ui-express";
import { fileURLToPath } from "url";
import { userMigrate } from "./src/internal/auth/controller/userMigrate.js";
import { schedulerManager } from "./src/internal/logistics/controller/schedulerManager.js";
import { setupMobileRoutes } from "./src/internal/mobile/route.js";
import { printers } from "./src/internal/ocp/printers/printers.js";
import { setupRoutes } from "./src/internal/routerHandler/routeHandler.js";
import { baseModules } from "./src/internal/system/controller/modules/baseModules.js";
@@ -21,7 +21,6 @@ import {
addListeners,
manualFixes,
} from "./src/internal/system/utlis/addListeners.js";
import { swaggerOptions } from "./src/pkg/apiDocs/swaggerOptions.js";
import { auth } from "./src/pkg/auth/auth.js";
import { db } from "./src/pkg/db/db.js";
import { settings } from "./src/pkg/db/schema/settings.js";
@@ -34,6 +33,9 @@ import { sendNotify } from "./src/pkg/utils/notify.js";
import { returnFunc } from "./src/pkg/utils/return.js";
import { tryCatch } from "./src/pkg/utils/tryCatch.js";
import { setupIoServer } from "./src/ws/server.js";
import { swaggerConfig, swaggerUiOptions } from "./src/internal/swagger/config.js";
import { setupSwagger } from "./src/internal/swagger/swagger.js";
const main = async () => {
const env = validateEnv(process.env);
@@ -75,7 +77,8 @@ const main = async () => {
}
// connect to the prod sql
await initializeProdPool();
console.log("Connecting to the sql server");
// express app
const app = express();
@@ -156,24 +159,33 @@ const main = async () => {
},
methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
credentials: true,
exposedHeaders: ["set-cookie"],
exposedHeaders: [
"set-cookie",
"expo-protocol-version",
"expo-sfv-version",
],
allowedHeaders: [
"Content-Type",
"Authorization",
"X-Requested-With",
"XMLHttpRequest",
"expo-runtime-version",
"expo-platform",
"expo-channel-name",
"*",
],
}),
);
// docs and routes
const openapiSpec: any = swaggerJsdoc(swaggerOptions);
app.use(
basePath + "/api/docs",
swaggerUi.serve,
swaggerUi.setup(openapiSpec),
);
// const openapiSpec: any = swaggerJsdoc(swaggerConfig);
// app.use(
// basePath + "/api/docs",
// swaggerUi.serve,
// swaggerUi.setup(openapiSpec, swaggerUiOptions),
// );
initializeProdPool();
setupSwagger(app, basePath)
app.use(basePath + "/d", express.static(join(__dirname, "../lstDocs/build")));
app.use(
basePath + "/app",
@@ -207,12 +219,17 @@ const main = async () => {
// start up the v1listener
v1Listener();
addListeners();
userMigrate();
//userMigrate();
// some temp fixes
// above 235 remove these
manualFixes();
//settingsMigrate();
}, 5 * 1000);
// setTimeout(() => {
// startHonoServer();
// }, 8 * 1000);
// start the server up
server.listen(PORT, "0.0.0.0", () =>
log.info(

View File

@@ -24,6 +24,13 @@ router.post("/", async (req: Request, res: Response) => {
.from(user)
.where(eq(user.username, validated.username));
if(userLogin.length === 0 ){
return res.status(200).json({
success: false,
message: `It appears you do not have a user yet please head over to the register page and create a user then try again.`,
});
}
if (
!userLogin[0].lastLogin ||
differenceInDays(userLogin[0].lastLogin, new Date(Date.now())) > 120

View File

@@ -18,7 +18,7 @@ const registerSchema = z.object({
.string()
.min(3)
.max(32)
.regex(/^[a-zA-Z0-9.]+$/, "Only alphanumeric + dots allowed"),
.regex(/^[a-zA-Z0-9._]+$/, "Only alphanumeric + dots allowed"),
displayUsername: z.string().min(2).max(100).optional(), // optional in your API, but supported
});

View File

@@ -0,0 +1,89 @@
import { Router, type Request, type Response } from "express";
import { prodQuery } from "../../../pkg/prodSql/prodQuery.js";
import { tryCatch } from "../../../pkg/utils/tryCatch.js";
import { db } from "../../../pkg/db/db.js";
import { settings } from "../../../pkg/db/schema/settings.js";
import { eq } from "drizzle-orm";
import { activeArticle } from "../../../pkg/prodSql/querys/datamart/article.js";

// Shape of one row returned by the `activeArticle` production query.
type Articles = {
  article: string;
  description: string;
  articleType: string;
  pricePoint: string;
  salesPrice: string;
  typeOfMaterial: string;
  articleIdType: string;
  articleWeight: string;
  idAddress: string;
  addressDescription: string;
  addressType: string;
  profitCenter: string;
  fg: string;
  num_of_cycles: string;
  costsCenterId: string;
  costCenterDescription: string;
  customerArticleNumber: string;
  customerArticleDescription: string;
  cycleTime: string;
  salesAgreement: string;
  productFamily: string;
  uom: string;
};

const router = Router();

// GET / — list active articles from the production datamart.
// Pass ?includePlantToken=<any value> to stamp each row with the plant
// token stored in the `settings` table.
router.get("/", async (req: Request, res: Response) => {
  // This is a query-string flag — the route declares no path params, so
  // req.params would always be empty here.
  const includePlantToken = req.query.includePlantToken;
  let articles: Articles[] = [];
  try {
    // Named distinctly so it does not shadow the Express `res` object.
    const queryRes = await prodQuery(activeArticle, "Get active articles");
    articles = queryRes?.data;
  } catch (error) {
    // Send the failure to the client; returning a bare object from an
    // Express handler never sends a response and leaves the request hanging.
    return res.status(500).json({
      success: false,
      message: "Error getting articles",
      error: error,
    });
  }
  if (includePlantToken) {
    const { data, error } = await tryCatch(
      db.select().from(settings).where(eq(settings.name, "plantToken")),
    );
    if (error) {
      return res.status(500).json({
        success: false,
        message: "Error getting settings",
        error: error,
      });
    }
    return res.status(200).json({
      success: true,
      message: "Active articles including plant token",
      data: articles.map((n) => ({ plantToken: data[0].value, ...n })),
    });
  }
  return res.status(200).json({
    success: true,
    message: "Active articles",
    data: articles,
  });
});
export default router;

View File

@@ -0,0 +1,10 @@
import type { Express, Request, Response } from "express";
//datamart Routes
import getActiveAv from './getActiveAv.js'

// Mount all datamart API routes under `<basePath>/api/datamart`.
// Currently only the active-article lookup is exposed.
export const setupDataMartRoutes = (app: Express, basePath: string) => {
  const route = basePath + "/api/datamart"
  // GET /activeArticle — active articles from the production datamart
  app.use(route + '/activeArticle', getActiveAv);
  //app.use(basePath + "/api/user/me", requireAuth(), me);
};

View File

@@ -33,7 +33,16 @@ router.post("/", async (req: Request, res: Response) => {
upd_user: req.user?.username,
upd_date: sql`NOW()`,
})
//.onConflictDoNothing()
.onConflictDoUpdate({
target: forkliftCompanies.name,
set: {
...parsed.data,
add_user: req.user?.username,
add_date: sql`NOW()`,
upd_user: req.user?.username,
upd_date: sql`NOW()`,
},
})
.returning({
name: forkliftCompanies.name,
}),

View File

@@ -0,0 +1,117 @@
import axios from "axios";
import { type DrizzleError, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import {
  forklifts,
  newForkliftsSchema,
} from "../../../../pkg/db/schema/forklifts.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";

const router = Router();

// POST / — create a forklift, or overwrite it in place when the serial
// number already exists (upsert keyed on forklifts.serialNumber).
router.post("/", async (req: Request, res: Response) => {
  // when a new server is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there
  //res.status(200).json({ message: "Server added", ip: req.hostname });
  const log = createLogger({ module: "forklift", subModule: "add forklift" });
  const parsed = newForkliftsSchema.safeParse(req.body);
  if (!parsed.success) {
    return res.status(400).json({ errors: parsed.error.flatten() });
  }
  const { data, error } = await tryCatch(
    db
      .insert(forklifts)
      .values({
        ...parsed.data,
        add_user: req.user?.username,
        add_date: sql`NOW()`,
        upd_user: req.user?.username,
        upd_date: sql`NOW()`,
      })
      .onConflictDoUpdate({
        target: forklifts.serialNumber,
        set: {
          ...parsed.data,
          add_user: req.user?.username,
          add_date: sql`NOW()`,
          upd_user: req.user?.username,
          upd_date: sql`NOW()`,
        },
      })
      .returning({
        serialNumber: forklifts.serialNumber,
      }),
  );
  if (error) {
    // Route the failure through the structured logger instead of a stray
    // console.log debug leftover.
    log.error({ stack: error }, "Error adding forklift");
    const err: DrizzleError = error;
    return res.status(400).json({
      message: `Error adding forklift`,
      error: err.cause,
    });
  }
  // if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
  //   log.info({}, "Running in dev server about to add in a new server");
  //   const axiosInstance = axios.create({
  //     httpsAgent: new https.Agent({ rejectUnauthorized: false }),
  //     baseURL: process.env.MAIN_SERVER, // e.g. "https://example.com"
  //     withCredentials: true,
  //   });
  //   const loginRes = (await axiosInstance.post(
  //     `${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
  //     {
  //       username: process.env.MAIN_SERVER_USERNAME,
  //       password: process.env.MAIN_SERVER_PASSWORD,
  //     },
  //     {
  //       headers: { "Content-Type": "application/json" },
  //     },
  //   )) as any;
  //   const setCookie = loginRes.headers["set-cookie"][0];
  //   if (!setCookie) {
  //     throw new Error("Did not receive a Set-Cookie header from login");
  //   }
  //   const { data, error } = await tryCatch(
  //     axios.post(
  //       `${process.env.MAIN_SERVER}/lst/api/forklifts/companies`,
  //       parsed.data,
  //       {
  //         headers: {
  //           "Content-Type": "application/json",
  //           Cookie: setCookie.split(";")[0],
  //         },
  //         withCredentials: true,
  //       },
  //     ),
  //   );
  //   if (error) {
  //     log.error(
  //       { stack: error },
  //       "There was an error adding the company to Main Server",
  //     );
  //   }
  //   log.info(
  //     { stack: data?.data },
  //     "A new Company was just added to the server.",
  //   );
  // }
  return res
    .status(201)
    .json({ message: `Forklift ${data[0]?.serialNumber} added`, data: data });
});
export default router;

View File

@@ -0,0 +1,26 @@
import { Router } from "express";
import { requireAuth } from "../../../../pkg/middleware/authMiddleware.js";
import addForklift from "./addForklift.js";
import getForklifts from "./getForklifts.js";
import updateForklift from "./updateForklift.js";

// Router for /forklifts. Every sub-router is mounted at "/" behind
// role-based auth, so a request falls through until one matches its
// method/path — mounting order therefore matters.
const router = Router();
// Read access: open to supervisors and above.
router.use(
  "/",
  requireAuth("forklifts", ["systemAdmin", "admin", "manager", "supervisor"]),
  getForklifts,
);
// Write access (create): admins only.
router.use(
  "/",
  requireAuth("forklifts", ["systemAdmin", "admin"]),
  addForklift,
);
// Write access (update): admins only.
router.use(
  "/",
  requireAuth("forklifts", ["systemAdmin", "admin"]),
  updateForklift,
);
export default router;

View File

@@ -0,0 +1,38 @@
import { and, asc, eq } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import { db } from "../../../../pkg/db/db.js";
import { forkliftCompanies } from "../../../../pkg/db/schema/forkliftLeaseCompanys.js";
import { leases } from "../../../../pkg/db/schema/forkliftLeases.js";
import { forklifts } from "../../../../pkg/db/schema/forklifts.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";

const router = Router();

// GET / — list forklifts ordered by serial number.
// Optional ?plant=<name> narrows the result to forklifts at that plant.
router.get("/", async (req: Request, res: Response) => {
  const plant = req.query.plant;
  const conditions = [];
  if (plant !== undefined) {
    // Filter on the forklifts table itself. The previous condition
    // referenced leases.leaseNumber — a table this query never joins — so
    // the generated SQL was broken whenever ?plant was supplied.
    conditions.push(eq(forklifts.plant, `${plant}`));
  }
  //conditions.push(eq(forkliftCompanies.active, true));
  const { data, error } = await tryCatch(
    db
      .select()
      .from(forklifts)
      //.innerJoin(forkliftCompanies, eq(forkliftCompanies.id, leases.companyId))
      .where(and(...conditions))
      .orderBy(asc(forklifts.serialNumber)),
  );
  if (error) {
    return res.status(400).json({ error: error });
  }
  res.status(200).json({ message: "Current Forklifts", data: data });
});
export default router;

View File

@@ -0,0 +1,136 @@
import axios from "axios";
import { eq, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import { forkliftCompanies } from "../../../../pkg/db/schema/forkliftLeaseCompanys.js";
import { leases } from "../../../../pkg/db/schema/forkliftLeases.js";
import { forklifts } from "../../../../pkg/db/schema/forklifts.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";

const router = Router();

// Allow-list of request-body fields a PATCH may copy onto the forklifts
// row; anything else in the body is silently ignored.
const updatableFields = [
  "model",
  "plant",
  "glCode",
  "profitCenter",
  "manufacturer",
  "manufacturerYear",
  "engine",
  "batteryType",
  "dataPlate",
  "forkliftNumber",
] as const;

// PATCH /:id — partially update one forklift identified by forklift_id.
router.patch("/:id", async (req: Request, res: Response) => {
  const log = createLogger({
    module: "forklifts",
    subModule: "update forklift",
  });
  // when a server is updated and is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there, we want to insert with update on conflict.
  const id = req.params.id;
  const updates: Record<string, any> = {};
  // Collapse the former ten copy-pasted `if` blocks into one allow-list loop.
  for (const field of updatableFields) {
    if (req.body?.[field] !== undefined) {
      updates[field] = req.body[field];
    }
  }
  // Always stamp who/when — so the UPDATE below always runs, matching the
  // original behavior.
  updates.upd_user = req.user!.username || "lst_user";
  updates.upd_date = sql`NOW()`;
  try {
    if (Object.keys(updates).length > 0) {
      await db
        .update(forklifts)
        .set(updates)
        .where(eq(forklifts.forklift_id, id));
    }
    // if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
    //   log.info({}, "Running in dev server about to add in a new server");
    //   const axiosInstance = axios.create({
    //     httpsAgent: new https.Agent({ rejectUnauthorized: false }),
    //     baseURL: process.env.MAIN_SERVER,
    //     withCredentials: true,
    //   });
    //   const loginRes = (await axiosInstance.post(
    //     `${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
    //     {
    //       username: process.env.MAIN_SERVER_USERNAME,
    //       password: process.env.MAIN_SERVER_PASSWORD,
    //     },
    //     {
    //       headers: { "Content-Type": "application/json" },
    //     },
    //   )) as any;
    //   const setCookie = loginRes?.headers["set-cookie"][0];
    //   //console.log(setCookie.split(";")[0].replace("__Secure-", ""));
    //   if (!setCookie) {
    //     throw new Error("Did not receive a Set-Cookie header from login");
    //   }
    //   const { data, error } = await tryCatch(
    //     axios.patch(
    //       `${process.env.MAIN_SERVER}/lst/api/forklifts/companies/${id}`,
    //       updates,
    //       {
    //         headers: {
    //           "Content-Type": "application/json",
    //           Cookie: setCookie.split(";")[0],
    //         },
    //         withCredentials: true,
    //       },
    //     ),
    //   );
    //   if (error) {
    //     //console.log(error);
    //     log.error(
    //       { stack: error },
    //       "There was an error updating the lease to Main Server",
    //     );
    //   }
    //   log.info(
    //     { stack: data?.data },
    //     "A new lease was just updated to the server.",
    //   );
    // }
    res.status(200).json({ message: `${id} was just updated` });
  } catch (error) {
    log.error({ stack: error }, "Error updating forklift");
    // Report the failure with an error status — the previous code answered
    // 200 on the failure path, so callers could not tell updates had failed.
    res.status(500).json({ message: "Error updating forklift", error });
  }
});
export default router;

View File

@@ -0,0 +1,114 @@
import axios from "axios";
import { type DrizzleError, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import {
  insertLeasesCompanySchema,
  leases,
} from "../../../../pkg/db/schema/forkliftLeases.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";

const router = Router();

// POST / — create a lease, or overwrite it in place when the lease number
// already exists (upsert keyed on leases.leaseNumber).
router.post("/", async (req: Request, res: Response) => {
  // when a new server is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there
  //res.status(200).json({ message: "Server added", ip: req.hostname });
  const log = createLogger({ module: "forklift", subModule: "add lease" });
  const parsed = insertLeasesCompanySchema.safeParse(req.body);
  if (!parsed.success) {
    return res.status(400).json({ errors: parsed.error.flatten() });
  }
  // Re-posting an existing lease number updates the row with the new
  // values; audit columns are restamped on both insert and conflict paths.
  const { data, error } = await tryCatch(
    db
      .insert(leases)
      .values({
        ...parsed.data,
        add_user: req.user?.username,
        add_date: sql`NOW()`,
        upd_user: req.user?.username,
        upd_date: sql`NOW()`,
      })
      .onConflictDoUpdate({
        target: leases.leaseNumber,
        set: {
          ...parsed.data,
          add_user: req.user?.username,
          add_date: sql`NOW()`,
          upd_user: req.user?.username,
          upd_date: sql`NOW()`,
        },
      })
      .returning({
        leaseNumber: leases.leaseNumber,
      }),
  );
  if (error) {
    const err: DrizzleError = error;
    return res.status(400).json({
      message: `Error adding lease`,
      error: err.cause,
    });
  }
  // if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
  //   log.info({}, "Running in dev server about to add in a new server");
  //   const axiosInstance = axios.create({
  //     httpsAgent: new https.Agent({ rejectUnauthorized: false }),
  //     baseURL: process.env.MAIN_SERVER, // e.g. "https://example.com"
  //     withCredentials: true,
  //   });
  //   const loginRes = (await axiosInstance.post(
  //     `${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
  //     {
  //       username: process.env.MAIN_SERVER_USERNAME,
  //       password: process.env.MAIN_SERVER_PASSWORD,
  //     },
  //     {
  //       headers: { "Content-Type": "application/json" },
  //     },
  //   )) as any;
  //   const setCookie = loginRes.headers["set-cookie"][0];
  //   if (!setCookie) {
  //     throw new Error("Did not receive a Set-Cookie header from login");
  //   }
  //   const { data, error } = await tryCatch(
  //     axios.post(
  //       `${process.env.MAIN_SERVER}/lst/api/forklifts/leases`,
  //       parsed.data,
  //       {
  //         headers: {
  //           "Content-Type": "application/json",
  //           Cookie: setCookie.split(";")[0],
  //         },
  //         withCredentials: true,
  //       },
  //     ),
  //   );
  //   if (error) {
  //     log.error(
  //       { stack: error },
  //       "There was an error adding the company to Main Server",
  //     );
  //   }
  //   log.info(
  //     { stack: data?.data },
  //     "A new Company was just added to the server.",
  //   );
  // }
  return res
    .status(201)
    .json({ message: `lease ${data[0]?.leaseNumber} added`, data: data });
});
export default router;

View File

@@ -0,0 +1,59 @@
import { and, asc, eq } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import { db } from "../../../../pkg/db/db.js";
import { forkliftCompanies } from "../../../../pkg/db/schema/forkliftLeaseCompanys.js";
import { leases } from "../../../../pkg/db/schema/forkliftLeases.js";
import { forklifts } from "../../../../pkg/db/schema/forklifts.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";

const router = Router();

// GET / — list leases (optionally filtered by ?lease= or ?companyId=),
// each decorated with the forklifts assigned to it via forklifts.leaseId.
router.get("/", async (req: Request, res: Response) => {
  const conditions = [];
  if (req.query.lease !== undefined) {
    conditions.push(eq(leases.leaseNumber, `${req.query.lease}`));
  }
  if (req.query.companyId !== undefined) {
    conditions.push(eq(leases.companyId, `${req.query.companyId}`));
  }
  //conditions.push(eq(forkliftCompanies.active, true));
  const { data, error } = (await tryCatch(
    db
      .select({
        id: leases.id,
        leaseNumber: leases.leaseNumber,
        startDate: leases.startDate,
        endDate: leases.endDate,
        leaseLink: leases.leaseLink,
        companyName: forkliftCompanies.name,
        add_user: leases.add_user,
        add_date: leases.add_date,
        upd_user: leases.upd_user,
        upd_date: leases.upd_date,
      })
      .from(leases)
      .innerJoin(forkliftCompanies, eq(forkliftCompanies.id, leases.companyId))
      .where(and(...conditions))
      .orderBy(asc(leases.leaseNumber)),
  )) as any;
  if (error) {
    return res.status(400).json({ error: error });
  }
  // Fetch the forklifts only after the lease query succeeded — previously
  // this round trip ran even when we were about to bail out with an error.
  const forkliftData = await db.select().from(forklifts);
  // Attach each lease's forklifts by matching leaseId.
  const leaseData = data.map((i: any) => ({
    ...i,
    forklifts: forkliftData.filter((x) => x.leaseId === i.id),
  }));
  res.status(200).json({ message: "Current Leases", data: leaseData });
});
export default router;

View File

@@ -0,0 +1,20 @@
import { Router } from "express";
import { requireAuth } from "../../../../pkg/middleware/authMiddleware.js";
import addHours from "./addHours.js";
import gethours from "./getHours.js";

// Router for forklift hours. Both sub-routers are mounted at "/" behind the
// same role-based auth, so a request falls through until one matches its
// method/path.
const router = Router();
// Record hours: supervisors and above.
router.use(
  "/",
  requireAuth("forklifts", ["systemAdmin", "admin", "manager", "supervisor"]),
  addHours,
);
// Read hours: same roles.
router.use(
  "/",
  requireAuth("forklifts", ["systemAdmin", "admin", "manager", "supervisor"]),
  gethours,
);
export default router;

View File

@@ -0,0 +1,166 @@
import axios from "axios";
import { type DrizzleError, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import type z from "zod";
import { db } from "../../../../pkg/db/db.js";
import {
  leaseInvoiceForklifts,
  newForkliftInvoiceSchema,
} from "../../../../pkg/db/schema/forkliftLeasesInvoice.js";
import {
  leaseInvoices,
  newInvoiceSchema,
} from "../../../../pkg/db/schema/leaseInvoices.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";

const router = Router();

// POST / — record a lease invoice plus its per-forklift amounts.
router.post("/", async (req: Request, res: Response) => {
  // when a new server is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there
  //res.status(200).json({ message: "Server added", ip: req.hostname });
  const log = createLogger({ module: "forklift", subModule: "add invoice" });
  const parsedInvoice = newInvoiceSchema.safeParse({
    leaseId: req.body.leaseId,
    companyId: req.body.companyId,
    invoiceNumber: req.body.invoiceNumber,
    invoiceDate: req.body.invoiceDate,
    comment: req.body.comment,
    uploadedBy: req.body.uploadedBy,
    totalAmount: req.body.totalAmount,
  });
  if (!parsedInvoice.success)
    // flatten is a method — without the call parentheses the client received
    // a function reference, which serializes to an empty object.
    return res.status(400).json({ error: parsedInvoice.error.flatten() });
  const invoiceData = parsedInvoice.data;
  const forkliftItems = Array.isArray(req.body.forklifts)
    ? req.body.forklifts
    : [];
  const validatedForklifts = []; //z.infer<typeof newForkliftInvoiceSchema>[] = [];
  for (const item of forkliftItems) {
    // const parsedItem = newForkliftInvoiceSchema.safeParse(item);
    // if (parsedItem.success) {
    validatedForklifts.push(item);
    //} else {
    //return res.status(400).json({ error: parsedItem.error.flatten() });
    //}
  }
  // this will be the total invoice amount minus each forklift this way we can keep the total amount in here plus forklifts seperated
  // const totalAmount = (
  //   validatedForklifts.reduce((sum, f) => sum + Number(f.amount || 0), 0) -
  //   req.body.totalInvoice
  // ).toString();
  const { data, error } = await tryCatch(
    db
      .insert(leaseInvoices)
      .values({
        ...invoiceData,
        add_date: sql`NOW()`,
        totalAmount: req.body.totalAmount,
        uploadedBy: req.user!.username || "lst_user",
      })
      // .onConflictDoUpdate({
      //   target: leaseInvoices.invoiceNumber,
      //   set: {
      //     totalAmount,
      //     invoiceDate: invoiceData.invoiceDate,
      //     uploadedBy: req.user!.username || "lst_user",
      //   },
      // })
      .returning(),
  );
  if (error) {
    const err: DrizzleError = error;
    return res.status(400).json({
      message: `Error adding invoice`,
      // @ts-ignore
      error: err.cause.detail,
    });
  }
  const invoiceId = data[0]?.id;
  // NOTE(review): the example client payload uses "forklift_Id" (capital I)
  // while this reads f.forklift_id — confirm the expected field casing.
  const forkliftInvoices = validatedForklifts.map((f) => {
    return {
      invoiceId,
      forkliftId: f.forklift_id,
      amount: f.amount,
    };
  });
  if (validatedForklifts.length > 0) {
    await db.insert(leaseInvoiceForklifts).values(forkliftInvoices);
    // .onConflictDoUpdate({
    //   target: [
    //     leaseInvoiceForklifts.invoiceId,
    //     leaseInvoiceForklifts.forkliftId,
    //   ],
    //   set: { amount: (excluded) => excluded.amount },
    // });
  }
  // if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
  //   log.info({}, "Running in dev server about to add in a new server");
  //   const axiosInstance = axios.create({
  //     httpsAgent: new https.Agent({ rejectUnauthorized: false }),
  //     baseURL: process.env.MAIN_SERVER, // e.g. "https://example.com"
  //     withCredentials: true,
  //   });
  //   const loginRes = (await axiosInstance.post(
  //     `${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
  //     {
  //       username: process.env.MAIN_SERVER_USERNAME,
  //       password: process.env.MAIN_SERVER_PASSWORD,
  //     },
  //     {
  //       headers: { "Content-Type": "application/json" },
  //     },
  //   )) as any;
  //   const setCookie = loginRes.headers["set-cookie"][0];
  //   if (!setCookie) {
  //     throw new Error("Did not receive a Set-Cookie header from login");
  //   }
  //   const { data, error } = await tryCatch(
  //     axios.post(
  //       `${process.env.MAIN_SERVER}/lst/api/forklifts/leases`,
  //       parsed.data,
  //       {
  //         headers: {
  //           "Content-Type": "application/json",
  //           Cookie: setCookie.split(";")[0],
  //         },
  //         withCredentials: true,
  //       },
  //     ),
  //   );
  //   if (error) {
  //     log.error(
  //       { stack: error },
  //       "There was an error adding the company to Main Server",
  //     );
  //   }
  //   log.info(
  //     { stack: data?.data },
  //     "A new Company was just added to the server.",
  //   );
  // }
  return res
    .status(201)
    .json({ message: `Invoice ${data[0]?.invoiceNumber} added`, data: data });
});
export default router;

View File

@@ -0,0 +1,52 @@
import { and, asc, eq, relations } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import { db } from "../../../../pkg/db/db.js";
import { forkliftCompanies } from "../../../../pkg/db/schema/forkliftLeaseCompanys.js";
import { leases } from "../../../../pkg/db/schema/forkliftLeases.js";
import { forklifts } from "../../../../pkg/db/schema/forklifts.js";
import { leaseInvoices } from "../../../../pkg/db/schema/leaseInvoices.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();
router.get("/", async (req: Request, res: Response) => {
  // List lease invoices, optionally filtered by ?lease=<invoiceNumber>,
  // ordered by invoice number ascending.
  const filters = [];
  const requestedInvoice = req.query.lease;
  if (requestedInvoice !== undefined) {
    filters.push(eq(leaseInvoices.invoiceNumber, `${requestedInvoice}`));
  }
  const invoiceQuery = db
    .select()
    .from(leaseInvoices)
    .where(and(...filters))
    .orderBy(asc(leaseInvoices.invoiceNumber));
  const { data, error } = await tryCatch(invoiceQuery);
  if (error) {
    return res.status(400).json({ error: error });
  }
  res.status(200).json({ message: "Current Leases", data: data });
});
export default router;

View File

@@ -0,0 +1,22 @@
import { Router } from "express";
import { requireAuth } from "../../../../pkg/middleware/authMiddleware.js";
import addInvoice from "./addInvoice.js";
import getInvoices from "./getInvoices.js";
import updateInvoice from "./updateInvoices.js";
const router = Router();

// Invoice routes: viewing is open to managers as well; creating and
// updating invoices require admin rights.
const guard = (roles: string[]) => requireAuth("forklifts", roles);
router.use("/", guard(["systemAdmin", "admin", "manager"]), getInvoices);
router.use("/", guard(["systemAdmin", "admin"]), addInvoice);
router.use("/", guard(["systemAdmin", "admin"]), updateInvoice);
export default router;

View File

@@ -0,0 +1,114 @@
import axios from "axios";
import { eq, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import { forkliftCompanies } from "../../../../pkg/db/schema/forkliftLeaseCompanys.js";
import { leases } from "../../../../pkg/db/schema/forkliftLeases.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();
router.patch("/:id", async (req: Request, res: Response) => {
  // Partially update a lease record by id: only fields present in the body
  // are written, and upd_user/upd_date are always stamped.
  const log = createLogger({
    module: "forklifts",
    subModule: "update invoice",
  });
  // NOTE(review): this "update invoice" route writes to the `leases` table,
  // not `leaseInvoices` — it looks copy-pasted from the lease update route.
  // Confirm which table is intended before relying on this endpoint.
  const id = req.params.id;

  // Copy over only the updatable fields the client actually sent.
  const updates: Record<string, any> = {};
  if (req.body?.leaseNumber !== undefined) {
    updates.leaseNumber = req.body.leaseNumber;
  }
  if (req.body?.startDate !== undefined) {
    updates.startDate = req.body.startDate;
  }
  if (req.body?.endDate !== undefined) {
    updates.endDate = req.body.endDate;
  }
  if (req.body?.companyId !== undefined) {
    updates.companyId = req.body.companyId;
  }
  if (req.body?.leaseLink !== undefined) {
    updates.leaseLink = req.body.leaseLink;
  }
  // Audit columns are always stamped, so `updates` is never empty here.
  updates.upd_user = req.user!.username || "lst_user";
  updates.upd_date = sql`NOW()`;
  log.info({ id, fields: Object.keys(updates) }, "applying update");
  try {
    await db.update(leases).set(updates).where(eq(leases.id, id));
    res.status(200).json({ message: `${id} was just updated` });
  } catch (error) {
    log.error({ stack: error }, "Error updating lease");
    // BUG FIX: failures previously responded with HTTP 200; use 500 so
    // callers can tell the update did not happen.
    res.status(500).json({ message: "Error updating lease", error });
  }
});
export default router;

View File

@@ -0,0 +1,114 @@
import axios from "axios";
import { type DrizzleError, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import {
insertLeasesCompanySchema,
leases,
} from "../../../../pkg/db/schema/forkliftLeases.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();
router.post("/", async (req: Request, res: Response) => {
  // Create a lease, upserting on leaseNumber; audit columns are stamped with
  // the authenticated user and the database clock.
  const log = createLogger({ module: "forklift", subModule: "add lease" });
  const parsed = insertLeasesCompanySchema.safeParse(req.body);
  if (!parsed.success) {
    return res.status(400).json({ errors: parsed.error.flatten() });
  }
  const username = req.user?.username;
  const auditedValues = {
    ...parsed.data,
    add_user: username,
    add_date: sql`NOW()`,
    upd_user: username,
    upd_date: sql`NOW()`,
  };
  const upsert = db
    .insert(leases)
    .values(auditedValues)
    .onConflictDoUpdate({
      target: leases.leaseNumber,
      set: { ...auditedValues },
    })
    .returning({
      leaseNumber: leases.leaseNumber,
    });
  const { data, error } = await tryCatch(upsert);
  if (error) {
    const err: DrizzleError = error;
    return res.status(400).json({
      message: `Error adding lease`,
      error: err.cause,
    });
  }
  return res
    .status(201)
    .json({ message: `lease ${data[0]?.leaseNumber} added`, data: data });
});
export default router;

View File

@@ -0,0 +1,59 @@
import { and, asc, eq } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import { db } from "../../../../pkg/db/db.js";
import { forkliftCompanies } from "../../../../pkg/db/schema/forkliftLeaseCompanys.js";
import { leases } from "../../../../pkg/db/schema/forkliftLeases.js";
import { forklifts } from "../../../../pkg/db/schema/forklifts.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();
router.get("/", async (req: Request, res: Response) => {
  // List leases joined to their company, optionally filtered by
  // ?lease=<leaseNumber> and/or ?companyId=<id>, each row carrying the
  // forklifts attached to that lease.
  const conditions = [];
  if (req.query.lease !== undefined) {
    conditions.push(eq(leases.leaseNumber, `${req.query.lease}`));
  }
  if (req.query.companyId !== undefined) {
    conditions.push(eq(leases.companyId, `${req.query.companyId}`));
  }
  const { data, error } = (await tryCatch(
    db
      .select({
        id: leases.id,
        leaseNumber: leases.leaseNumber,
        startDate: leases.startDate,
        endDate: leases.endDate,
        leaseLink: leases.leaseLink,
        companyName: forkliftCompanies.name,
        add_user: leases.add_user,
        add_date: leases.add_date,
        upd_user: leases.upd_user,
        upd_date: leases.upd_date,
      })
      .from(leases)
      .innerJoin(forkliftCompanies, eq(forkliftCompanies.id, leases.companyId))
      .where(and(...conditions))
      .orderBy(asc(leases.leaseNumber)),
  )) as any;
  if (error) {
    // FIX: bail out before the forklift query — previously the extra DB
    // round trip ran even when the lease query had already failed.
    return res.status(400).json({ error: error });
  }
  // Attach every forklift whose leaseId matches each lease row.
  const forkliftData = await db.select().from(forklifts);
  const leaseData = data.map((i: any) => ({
    ...i,
    forklifts: forkliftData.filter((x) => x.leaseId === i.id),
  }));
  res.status(200).json({ message: "Current Leases", data: leaseData });
});
export default router;

View File

@@ -0,0 +1,18 @@
import { Router } from "express";
import { requireAuth } from "../../../../pkg/middleware/authMiddleware.js";
import addLeases from "./addLease.js";
import getLeases from "./getLeases.js";
import updateLeases from "./updateLease.js";
const router = Router();

// Every lease endpoint is restricted to forklift system admins and admins.
for (const handler of [getLeases, addLeases, updateLeases]) {
  router.use("/", requireAuth("forklifts", ["systemAdmin", "admin"]), handler);
}
export default router;

View File

@@ -0,0 +1,114 @@
import axios from "axios";
import { eq, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import { forkliftCompanies } from "../../../../pkg/db/schema/forkliftLeaseCompanys.js";
import { leases } from "../../../../pkg/db/schema/forkliftLeases.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();
router.patch("/:id", async (req: Request, res: Response) => {
const log = createLogger({
module: "forklifts",
subModule: "update leases",
});
// when a server is updated and is posted from localhost or 127.0.0.1 we also want to post it to the test server so we can see it from there, we want to insert with update on conflict.
const id = req.params.id;
const updates: Record<string, any> = {};
console.log(req.body);
if (req.body?.leaseNumber !== undefined) {
updates.leaseNumber = req.body.leaseNumber;
}
if (req.body?.startDate !== undefined) {
updates.startDate = req.body.startDate;
}
if (req.body?.endDate !== undefined) {
updates.endDate = req.body.endDate;
}
if (req.body?.companyId !== undefined) {
updates.companyId = req.body.companyId;
}
if (req.body?.leaseLink !== undefined) {
updates.leaseLink = req.body.leaseLink;
}
updates.upd_user = req.user!.username || "lst_user";
updates.upd_date = sql`NOW()`;
console.log(updates);
try {
if (Object.keys(updates).length > 0) {
await db.update(leases).set(updates).where(eq(leases.id, id));
}
// if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
// log.info({}, "Running in dev server about to add in a new server");
// const axiosInstance = axios.create({
// httpsAgent: new https.Agent({ rejectUnauthorized: false }),
// baseURL: process.env.MAIN_SERVER,
// withCredentials: true,
// });
// const loginRes = (await axiosInstance.post(
// `${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
// {
// username: process.env.MAIN_SERVER_USERNAME,
// password: process.env.MAIN_SERVER_PASSWORD,
// },
// {
// headers: { "Content-Type": "application/json" },
// },
// )) as any;
// const setCookie = loginRes?.headers["set-cookie"][0];
// //console.log(setCookie.split(";")[0].replace("__Secure-", ""));
// if (!setCookie) {
// throw new Error("Did not receive a Set-Cookie header from login");
// }
// const { data, error } = await tryCatch(
// axios.patch(
// `${process.env.MAIN_SERVER}/lst/api/forklifts/leases/${id}`,
// updates,
// {
// headers: {
// "Content-Type": "application/json",
// Cookie: setCookie.split(";")[0],
// },
// withCredentials: true,
// },
// ),
// );
// if (error) {
// //console.log(error);
// log.error(
// { stack: error },
// "There was an error updating the lease to Main Server",
// );
// }
// log.info(
// { stack: data?.data },
// "A new lease was just updated to the server.",
// );
// }
res.status(200).json({ message: `${id} was just updated` });
} catch (error) {
//console.log(error);
res.status(200).json({ message: "Error updating lease", error });
}
});
export default router;

View File

@@ -1,9 +1,24 @@
import type { Express, Request, Response } from "express";
import { requireAuth } from "../../../pkg/middleware/authMiddleware.js";
import companies from "./companies/companiesRoutes.js";
import forklifts from "./forklifts/forkliftRoutes.js";
import invoices from "./invoices/invoiceRoutes.js";
import leases from "./leases/leaseRoutes.js";
export const setupForkliftRoutes = (app: Express, basePath: string) => {
  // Mount each forklift sub-router under basePath. Authorization is enforced
  // inside the individual routers (requireAuth), not at mount time.
  const apiRoot = basePath + "/api/forklifts";
  app.use(apiRoot, forklifts);
  app.use(apiRoot + "/companies", companies);
  app.use(apiRoot + "/leases", leases);
  app.use(apiRoot + "/invoices", invoices);
};

View File

@@ -12,7 +12,6 @@ import { db } from "../../../../pkg/db/db.js";
import {
type ForecastData,
forecastData,
forecastDataSchema,
} from "../../../../pkg/db/schema/forecastEDIData.js";
import { prodQuery } from "../../../../pkg/prodSql/prodQuery.js";
import { activeArticle } from "../../../../pkg/prodSql/querys/datamart/article.js";
@@ -36,19 +35,21 @@ export const forecastEdiData = async (data: ForecastData[]) => {
for (let i = 0; i < data.length; i++) {
const activeAV = article?.data.filter(
(c: any) =>
c?.CustomerArticleNumber === data[i].customerArticleNo?.toString(),
c?.customerArticleNumber === data[i].customerArticleNo?.toString(),
);
const newData = data[i];
//console.log(activeAV[0].IdArtikelvarianten);
forecaseEDIDATA.push({
...newData,
article: activeAV[0].IdArtikelvarianten,
article: activeAV.length > 0 ? activeAV[0].article : 0,
description:
activeAV.length > 0 ? activeAV[0].description : "No Av Created",
requirementDate: new Date(newData.requirementDate),
});
}
console.log(forecaseEDIDATA[0]);
//console.log(forecaseEDIDATA[0]);
const { data: f, error: ef } = await tryCatch(
db.insert(forecastData).values(forecaseEDIDATA),
);

View File

@@ -69,6 +69,8 @@ const addProdLabel = async (
if (prodLabelError) {
log.error({ error: error }, "Error adding the label");
}
return;
};
export const preprintLabels = async (preprint: Preprint, username?: string) => {
@@ -85,12 +87,12 @@ export const preprintLabels = async (preprint: Preprint, username?: string) => {
"POST",
"/public/v1.0/Warehousing/GenerateAndPrintLabel",
{
scannerId: preprint.scannerId,
lotNr: preprint.lotNr,
machineId: preprint.machineId, // 457=22, 458=23
printerId: preprint.printerId, // 457=22, 458=23
layoutId: preprint.layoutId,
numberOfCopies: preprint.numberOfCopies,
scannerId: preprint.scannerId.toString(),
lotNr: preprint.lotNr.toString(),
machineId: preprint.machineId.toString(),
printerId: preprint.printerId.toString(),
layoutId: preprint.layoutId.toString(),
numberOfCopies: preprint.numberOfCopies.toString(),
},
);
if (labels?.data.Result === 1) {
@@ -116,7 +118,7 @@ export const preprintLabels = async (preprint: Preprint, username?: string) => {
}
labelsPrinted.push(parseInt(labels?.data.SSCC.slice(10, -1)));
// add the label to our label db for tracking purposes
addProdLabel(
await addProdLabel(
preprint,
parseInt(labels?.data.SSCC.slice(10, -1)),
username || "lst",
@@ -141,12 +143,12 @@ export const preprintLabels = async (preprint: Preprint, username?: string) => {
"POST",
"/public/v1.0/Warehousing/GenerateAndPrintLabel",
{
scannerId: preprint.scannerId,
lotNr: preprint.lotNr,
machineId: preprint.machineId, // 457=22, 458=23
printerId: preprint.printerId, // 457=22, 458=23
layoutId: preprint.layoutId,
numberOfCopies: preprint.numberOfCopies,
scannerId: preprint.scannerId.toString(),
lotNr: preprint.lotNr.toString(),
machineId: preprint.machineId.toString(),
printerId: preprint.printerId.toString(),
layoutId: preprint.layoutId.toString(),
numberOfCopies: preprint.numberOfCopies.toString(),
},
);
@@ -173,7 +175,7 @@ export const preprintLabels = async (preprint: Preprint, username?: string) => {
}
labelsPrinted.push(parseInt(labels.data.SSCC.slice(10, -1)));
addProdLabel(
await addProdLabel(
preprint,
parseInt(labels?.data.SSCC.slice(10, -1)),
username || "lst",

View File

@@ -1,6 +1,9 @@
import type { Request, Response } from "express";
import { Router } from "express";
import z from "zod";
import z, { success } from "zod";
import { db } from "../../../../pkg/db/db.js";
import { forecastData } from "../../../../pkg/db/schema/forecastEDIData.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
import { forecastEdiData } from "../../controller/demandManagement/forecastEDIData.js";
export const Preprint = z.object({
@@ -21,4 +24,19 @@ router.post("/forecastData", async (req: Request, res: Response) => {
res.status(200).json({ success: true, message: "Forecast Data", data: [] });
});
// quick fix for getting the data
router.get("/forecastData", async (req: Request, res: Response) => {
  // Return every stored forecast EDI row.
  const result = await tryCatch(db.select().from(forecastData));
  if (result.error) {
    return res.status(400).json({
      success: false,
      message: "Error getting forecast data",
      error: result.error,
    });
  }
  res
    .status(200)
    .json({ success: true, message: "Forecast Data", data: result.data });
});
export default router;

View File

@@ -0,0 +1,211 @@
import type { Express, Request, Response } from "express";
import express, { Router } from "express";
import { readdirSync, readFileSync, statSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import crypto from "crypto";
import fs from "fs";
export const setupMobileRoutes = (app: Express, basePath: string) => {
  // Expo OTA update server for the mobile scanner app: serves an update
  // manifest at /api/mobile/updates and static bundle/asset files out of
  // mobileLst/dist. Follows the Expo Updates protocol (SDK 50+ manifest).
  const router = Router(); // NOTE(review): unused — routes attach to `app` directly.
  const __filename = fileURLToPath(import.meta.url);
  const __dirname = dirname(__filename);
  const distPath = join(__dirname, "../../../../mobileLst/dist");

  // Build the manifest "assets" array: one entry per file under dist/assets,
  // each keyed by its SHA-256 content hash. Returns [] if the dir is missing.
  function generateAssetManifest(baseUrl: string) {
    const assets: any[] = [];
    const assetsDir = join(distPath, "assets");
    try {
      if (!fs.existsSync(assetsDir)) {
        return assets;
      }
      const files = readdirSync(assetsDir);
      files.forEach((file) => {
        const filePath = join(assetsDir, file);
        const stats = statSync(filePath);
        if (stats.isFile()) {
          const content = readFileSync(filePath);
          const hash = crypto
            .createHash("sha256")
            .update(content)
            .digest("hex");
          assets.push({
            hash: hash,
            key: file,
            fileExtension: `.${file.split(".").pop()}`,
            contentType: getContentType(file),
            url: `${baseUrl}/assets/${file}`,
          });
        }
      });
    } catch (err) {
      console.log("Error reading assets:", err);
    }
    return assets;
  }

  // Map a file extension to the MIME type reported in the manifest;
  // unknown extensions fall back to application/octet-stream.
  function getContentType(filename: string): string {
    const ext = filename.split(".").pop()?.toLowerCase();
    const contentTypes: { [key: string]: string } = {
      hbc: "application/javascript",
      bundle: "application/javascript",
      js: "application/javascript",
      json: "application/json",
      png: "image/png",
      jpg: "image/jpeg",
      jpeg: "image/jpeg",
      gif: "image/gif",
      ttf: "font/ttf",
      otf: "font/otf",
      woff: "font/woff",
      woff2: "font/woff2",
    };
    return contentTypes[ext || ""] || "application/octet-stream";
  }

  // Manifest endpoint: the app polls this with expo-runtime-version /
  // expo-platform headers; a 404 means "no update for your runtime".
  app.get(basePath + "/api/mobile/updates", (req, res) => {
    console.log("=== OTA Update Request ===");
    console.log("Headers:", JSON.stringify(req.headers, null, 2));
    const runtimeVersion = req.headers["expo-runtime-version"];
    const platform = req.headers["expo-platform"] || "android"; // NOTE(review): currently unused.
    // Only one runtime version is served; bump this alongside app releases.
    const expectedRuntimeVersion = "1.0.0";
    if (runtimeVersion !== expectedRuntimeVersion) {
      console.log(
        `Runtime mismatch: got ${runtimeVersion}, expected ${expectedRuntimeVersion}`
      );
      return res.status(404).json({
        error: "No update available for this runtime version",
        requestedVersion: runtimeVersion,
        availableVersion: expectedRuntimeVersion,
      });
    }
    try {
      // const host = req.get('host');
      // // If it's the production domain, force https
      // const protocol = host.includes('alpla.net') ? 'https' : req.protocol;
      // const baseUrl = `${protocol}://${host}/lst/api/mobile/updates`
      const host = req.get('host'); // Should be "usmcd1vms036:4000"
      // Protocol is pinned to http for now — see the commented https logic above.
      const protocol = 'http';
      const baseUrl = `${protocol}://${host}/api/mobile/updates`;
      // Find the .hbc file (the Hermes-compiled JS bundle for Android).
      const bundleDir = join(distPath, "_expo/static/js/android");
      if (!fs.existsSync(bundleDir)) {
        console.error("Bundle directory does not exist:", bundleDir);
        return res
          .status(500)
          .json({ error: "Bundle directory not found" });
      }
      const bundleFiles = readdirSync(bundleDir);
      console.log("Available bundle files:", bundleFiles);
      const bundleFile = bundleFiles.find((f) => f.endsWith(".hbc"));
      if (!bundleFile) {
        console.error("No .hbc file found in:", bundleDir);
        return res
          .status(500)
          .json({ error: "Hermes bundle (.hbc) not found" });
      }
      console.log("Using bundle file:", bundleFile);
      const bundlePath = join(bundleDir, bundleFile);
      const bundleContent = readFileSync(bundlePath);
      // The launch asset is identified by the SHA-256 of its content.
      const bundleHash = crypto
        .createHash("sha256")
        .update(bundleContent)
        .digest("hex");
      // A fresh id/createdAt per request — presumably the client dedupes by
      // asset hash rather than update id; TODO confirm against Expo client.
      const updateId = crypto.randomUUID();
      const createdAt = new Date().toISOString();
      // This is the NEW manifest format for Expo SDK 50+
      const manifest = {
        id: updateId,
        createdAt: createdAt,
        runtimeVersion: expectedRuntimeVersion,
        launchAsset: {
          hash: bundleHash,
          key: bundleFile,
          contentType: "application/javascript",
          fileExtension: ".hbc",
          url: `${baseUrl}/_expo/static/js/android/${bundleFile}`,
        },
        assets: generateAssetManifest(baseUrl),
        metadata: {},
        extra: {
          expoClient: {
            name: "LSTScanner",
            slug: "lst-scanner-app",
            version: "1.0.0",
            runtimeVersion: expectedRuntimeVersion,
          },
        },
      };
      console.log(
        "Returning manifest:",
        JSON.stringify(manifest, null, 2)
      );
      // Protocol headers the Expo updates client expects on the manifest.
      res.setHeader("Content-Type", "application/json");
      res.setHeader("Access-Control-Allow-Origin", "*");
      res.setHeader("expo-protocol-version", "1");
      res.setHeader("expo-sfv-version", "0");
      res.json(manifest);
    } catch (error: any) {
      console.error("Error generating manifest:", error);
      res.status(500).json({
        error: "Failed to generate manifest",
        details: error.message,
        stack: error.stack,
      });
    }
  });
  // Serve static files (bundle + assets) referenced by the manifest URLs.
  app.use(
    basePath + "/api/mobile/updates",
    express.static(distPath, {
      setHeaders(res, path) {
        res.setHeader("Access-Control-Allow-Origin", "*");
        // Content is hash-addressed, so it can be cached aggressively.
        res.setHeader("Cache-Control", "public, max-age=31536000");
        if (path.endsWith(".hbc")) {
          res.setHeader("Content-Type", "application/javascript");
        }
      },
    })
  );
  // app.use(
  //   basePath + "/api/mobile/updates",
  //   express.static(join(__dirname, mobileDir), {
  //     setHeaders(res) {
  //       // OTA runtime needs to fetch these from the device
  //       console.log("OTA check called");
  //       res.setHeader("Access-Control-Allow-Origin", "*");
  //     },
  //   })
  // );
  // app.get(basePath + "/api/mobile/updates", (req, res) => {
  //   res.redirect(basePath + "/api/mobile/updates/metadata.json");
  // });
  // Simple liveness probe for the OTA server.
  app.get(basePath + "/api/mobile", (_, res) =>
    res.status(200).json({ message: "LST OTA server is up." })
  );
};

View File

@@ -4,22 +4,26 @@ import { setupAuthRoutes } from "../auth/routes/routes.js";
import { setupForkliftRoutes } from "../forklifts/routes/routes.js";
import { setupLogisticsRoutes } from "../logistics/routes.js";
import { setupSystemRoutes } from "../system/routes.js";
import { setupMobileRoutes } from "../mobile/route.js";
import { setupDataMartRoutes } from "../datamart/routes/routes.js";
export const setupRoutes = (app: Express, basePath: string) => {
// all routes
setupAuthRoutes(app, basePath);
setupAdminRoutes(app, basePath);
setupSystemRoutes(app, basePath);
setupLogisticsRoutes(app, basePath);
setupForkliftRoutes(app, basePath);
// all routes
setupAuthRoutes(app, basePath);
setupAdminRoutes(app, basePath);
setupSystemRoutes(app, basePath);
setupLogisticsRoutes(app, basePath);
setupForkliftRoutes(app, basePath);
setupMobileRoutes(app, basePath);
setupDataMartRoutes(app, basePath)
// always try to go to the app weather we are in dev or in production.
app.get(basePath + "/", (req: Request, res: Response) => {
res.redirect(basePath + "/app");
});
// always try to go to the app weather we are in dev or in production.
app.get(basePath + "/", (req: Request, res: Response) => {
res.redirect(basePath + "/app");
});
// Fallback 404 handler
app.use((req: Request, res: Response) => {
res.status(404).json({ error: "Not Found" });
});
// Fallback 404 handler
app.use((req: Request, res: Response) => {
res.status(404).json({ error: "Not Found" });
});
};

View File

@@ -0,0 +1,59 @@
// Options passed to swagger-ui-express: hides the top bar and keeps auth
// tokens across page reloads for easier manual testing.
export const swaggerUiOptions = {
  explorer: true,
  customCss: ".swagger-ui .topbar { display: none }",
  customSiteTitle: "LST API Documentation",
  swaggerOptions: {
    persistAuthorization: true,
    displayRequestDuration: true,
    filter: true,
    syntaxHighlight: {
      activate: true,
      theme: "monokai",
    },
  },
};
// Base swagger-jsdoc configuration; `paths` are merged in where the spec is
// assembled, and `apis` stays empty because endpoints are hand-written objects.
export const swaggerConfig = {
  definition: {
    openapi: "3.0.0",
    info: {
      title: "Logistics Support Tool",
      version: "1.8.0",
      description: "Complete API documentation for lst",
      contact: {
        name: "API Support",
        email: "blake.matthes@alpla.com",
      },
    },
    servers: [
      {
        url: "http://localhost:4200",
        description: "Development server",
      },
      {
        // NOTE(review): placeholder host — update before publishing the docs.
        url: "https://api.yourapp.com",
        description: "Production server",
      },
    ],
    components: {
      securitySchemes: {
        bearerAuth: {
          type: "http",
          scheme: "bearer",
          bearerFormat: "JWT",
        },
        apiKey: {
          type: "apiKey",
          in: "header",
          name: "X-API-Key",
        },
      },
    },
    // Bearer auth is the default security requirement for all endpoints.
    security: [
      {
        bearerAuth: [],
      },
    ],
  },
  apis: [], // We'll populate this dynamically
};

View File

@@ -0,0 +1,129 @@
// Hand-written OpenAPI 3 path object for the login endpoint; intended to be
// spread into the swagger spec's `paths` map.
const loginEndpoint = {
  '/lst/api/user/login': {
    post: {
      tags: ['Authentication'],
      summary: 'Login to get a token',
      description: 'User enters username and password, gets back a JWT token and session data',
      // What the user sends you
      requestBody: {
        required: true,
        content: {
          'application/json': {
            schema: {
              type: 'object',
              required: ['username', 'password'],
              properties: {
                username: {
                  type: 'string',
                  example: 'smith01'
                },
                password: {
                  type: 'string',
                  example: 'MyPassword123'
                }
              }
            }
          }
        }
      },
      // What you send back to the user
      responses: {
        // SUCCESS - Login worked
        200: {
          description: 'Login successful',
          content: {
            'application/json': {
              schema: {
                type: 'object',
                properties: {
                  success: {
                    type: 'boolean',
                    example: true
                  },
                  message: {
                    type: 'string',
                    example: 'Login successful'
                  },
                  data: {
                    type: 'object',
                    properties: {
                      token: {
                        type: 'string',
                        example: 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...'
                      },
                      user: {
                        type: 'object',
                        properties: {
                          id: {
                            type: 'string',
                            example: '12345'
                          },
                          email: {
                            type: 'string',
                            example: 'user@example.com'
                          },
                          username: {
                            type: 'string',
                            example: 'johndoe'
                          }
                        }
                      }
                    }
                  }
                }
              }
            }
          }
        },
        // ERROR - Wrong password or email
        401: {
          description: 'Wrong email or password',
          content: {
            'application/json': {
              schema: {
                type: 'object',
                properties: {
                  success: {
                    type: 'boolean',
                    example: false
                  },
                  message: {
                    type: 'string',
                    example: 'Invalid credentials'
                  }
                }
              }
            }
          }
        },
        // ERROR - Missing fields
        // NOTE(review): these 400 examples mention "email" while the request
        // schema requires "username" — confirm which the API actually uses.
        400: {
          description: 'Missing email or password',
          content: {
            'application/json': {
              schema: {
                type: 'object',
                properties: {
                  success: {
                    type: 'boolean',
                    example: false
                  },
                  message: {
                    type: 'string',
                    example: 'Email and password are required'
                  }
                }
              }
            }
          }
        }
      }
    }
  }
};
export default loginEndpoint;

View File

@@ -0,0 +1,31 @@
import swaggerJsdoc from 'swagger-jsdoc';
import swaggerUi from 'swagger-ui-express';
import { swaggerConfig, swaggerUiOptions } from './config.js';
import { type Express } from 'express';
import loginEndpoint from './endpoints/auth/login.js';
// Merge every hand-written endpoint definition into one OpenAPI "paths" map.
const endpointPaths = {
  ...loginEndpoint,
  // Additional endpoint modules get spread in here as they are written.
};

// Resolve the final OpenAPI spec once at module load time.
const specs = swaggerJsdoc({
  ...swaggerConfig,
  definition: {
    ...swaggerConfig.definition,
    paths: endpointPaths,
  },
});

// Mount the interactive Swagger UI at <basePath>/api/docs.
export function setupSwagger(app: Express, basePath: string): void {
  app.use(
    basePath + "/api/docs",
    swaggerUi.serve,
    swaggerUi.setup(specs, swaggerUiOptions),
  );
}

View File

@@ -100,8 +100,8 @@
"category": "quality",
"active": false,
"icon": "",
"link": "",
"roles": ["admin", "systemAdmin", "manager", "viewer", "tester"]
"link": "/lst/app/old/quality",
"roles": ["admin", "systemAdmin", "manager", "supervisor", "tester"]
},
{
"name": "eom",
@@ -118,5 +118,13 @@
"icon": "",
"link": "/lst/app/old/forklifts",
"roles": ["admin", "systemAdmin", "manager", "tester"]
},
{
"name": "barcodes",
"category": "logistics",
"active": true,
"icon": "Barcode",
"link": "/lst/app/old/barcodegen",
"roles": ["admin", "systemAdmin", "manager", "tester", "viewer"]
}
]

View File

@@ -5,66 +5,31 @@ import { Router } from "express";
import https from "https";
import { db } from "../../../../pkg/db/db.js";
import { serverData } from "../../../../pkg/db/schema/servers.js";
import { settings } from "../../../../pkg/db/schema/settings.js";
import { createLogger } from "../../../../pkg/logger/logger.js";
import { tryCatch } from "../../../../pkg/utils/tryCatch.js";
const router = Router();
router.patch("/:token", async (req: Request, res: Response) => {
const log = createLogger({ module: "admin", subModule: "update server" });
router.patch("/:id", async (req: Request, res: Response) => {
const log = createLogger({ module: "admin", subModule: "update setting" });
// When a server update is posted from localhost or 127.0.0.1, we also want to forward it to the test server (insert with update-on-conflict) so it is visible there.
const token = req.params.token;
const id = req.params.id;
const updates: Record<string, any> = {};
if (req.body?.name !== undefined) {
updates.name = req.body.name;
}
if (req.body?.serverDNS !== undefined) {
updates.serverDNS = req.body.serverDNS;
if (req.body?.value !== undefined) {
updates.value = req.body.value;
}
if (req.body?.ipAddress !== undefined) {
updates.ipAddress = req.body.ipAddress;
if (req.body?.description !== undefined) {
updates.description = req.body.description;
}
if (req.body?.greatPlainsPlantCode !== undefined) {
updates.greatPlainsPlantCode = req.body.greatPlainsPlantCode;
}
if (req.body?.lstServerPort !== undefined) {
updates.lstServerPort = req.body.lstServerPort;
}
if (req.body?.serverLoc !== undefined) {
updates.serverLoc = req.body.serverLoc;
}
if (req.body?.streetAddress !== undefined) {
updates.streetAddress = req.body.streetAddress;
}
if (req.body?.cityState !== undefined) {
updates.cityState = req.body.cityState;
}
if (req.body?.zipcode !== undefined) {
updates.zipcode = req.body.zipcode;
}
if (req.body?.contactEmail !== undefined) {
updates.contactEmail = req.body.contactEmail;
}
if (req.body?.contactPhone !== undefined) {
updates.contactPhone = req.body.contactPhone;
}
if (req.body?.customerTiAcc !== undefined) {
updates.customerTiAcc = req.body.customerTiAcc;
}
if (req.body?.active !== undefined) {
updates.active = req.body.active;
if (req.body?.moduleName !== undefined) {
updates.moduleName = req.body.moduleName;
}
updates.upd_user = req.user!.username || "lst_user";
@@ -73,65 +38,12 @@ router.patch("/:token", async (req: Request, res: Response) => {
try {
if (Object.keys(updates).length > 0) {
await db
.update(serverData)
.update(settings)
.set(updates)
.where(eq(serverData.plantToken, token));
.where(eq(settings.settings_id, id));
}
if (req.hostname === "localhost" && process.env.MAIN_SERVER) {
log.info({}, "Running in dev server about to add in a new server");
const axiosInstance = axios.create({
httpsAgent: new https.Agent({ rejectUnauthorized: false }),
baseURL: process.env.MAIN_SERVER,
withCredentials: true,
});
const loginRes = (await axiosInstance.post(
`${process.env.MAIN_SERVER}/lst/api/auth/sign-in/username`,
{
username: process.env.MAIN_SERVER_USERNAME,
password: process.env.MAIN_SERVER_PASSWORD,
},
{
headers: { "Content-Type": "application/json" },
},
)) as any;
const setCookie = loginRes?.headers["set-cookie"][0];
//console.log(setCookie.split(";")[0].replace("__Secure-", ""));
if (!setCookie) {
throw new Error("Did not receive a Set-Cookie header from login");
}
const { data, error } = await tryCatch(
axios.patch(
`${process.env.MAIN_SERVER}/lst/api/admin/server/${token}`,
updates,
{
headers: {
"Content-Type": "application/json",
Cookie: setCookie.split(";")[0],
},
withCredentials: true,
},
),
);
if (error) {
console.log(error);
log.error(
{ stack: error },
"There was an error adding the server to Main Server",
);
}
log.info(
{ stack: data?.data },
"A new Server was just added to the server.",
);
}
res.status(200).json({ message: `${token} Server was just updated` });
res.status(200).json({ message: `Setting was just updated` });
} catch (error) {
console.log(error);
res.status(400).json({ message: "Error Server updated", error });

View File

@@ -58,6 +58,8 @@ router.get("/", async (req, res) => {
memoryUsage: `Heap: ${(used.heapUsed / 1024 / 1024).toFixed(2)} MB / RSS: ${(
used.rss / 1024 / 1024
).toFixed(2)} MB`,
eomFGPkgSheetVersion: 1, // this is the excel file version when we have a change to the macro we want to grab this
masterMacroFile: 1, // this is the excel file version when we have a change to the macro we want to grab this
});
});

View File

@@ -1,5 +1,8 @@
import { Client } from "pg";
import { db } from "../../../pkg/db/db.js";
import { type NewSetting, settings } from "../../../pkg/db/schema/settings.js";
import { createLogger } from "../../../pkg/logger/logger.js";
import { tryCatch } from "../../../pkg/utils/tryCatch.js";
export const addListeners = async () => {
const log = createLogger({ module: "utils", subModule: "listeners" });
@@ -60,9 +63,8 @@ export const addListeners = async () => {
}
};
// All the migration stuff that will need to be moved later; builds 230 and above will need to remove this.
export const manualFixes = async () => {
const fixQuery = `ALTER TABLE "serverData" ADD CONSTRAINT "serverData_name_unique" UNIQUE("name");`;
const log = createLogger({ module: "utils", subModule: "manual fixes" });
const client = new Client({
connectionString: `postgresql://${process.env.DATABASE_USER}:${process.env.DATABASE_PASSWORD}@${process.env.DATABASE_HOST}:${process.env.DATABASE_PORT}/${process.env.DATABASE_DB}`,
@@ -70,10 +72,59 @@ export const manualFixes = async () => {
await client.connect();
/**
* The fix to correct the constraint on the server data
*/
// const fixQuery = `ALTER TABLE "serverData" ADD CONSTRAINT "serverData_name_unique" UNIQUE("name");`;
// try {
// log.info({}, "Running the manual fix");
// await client.query(fixQuery);
// } catch (e) {
// log.info({ error: e }, "Fix was not completed");
// }
};
export const settingsMigrate = async () => {
const log = createLogger({ module: "utils", subModule: "v1Migration" });
const client = new Client({
connectionString: process.env.DATABASE_URL_V1,
});
await client.connect();
let settingsV1: NewSetting[] = [];
try {
log.info({}, "Running the manual fix");
await client.query(fixQuery);
const s = await client.query("SELECT * FROM settings");
settingsV1 = s.rows.map((i) => {
return {
name: i.name,
value: i.value,
description: i.description,
moduleName: i.moduleName,
};
});
} catch (e) {
log.info({ error: e }, "Fix was not completed");
log.error({ error: e }, "There was an error getting the settings.");
}
const { data, error } = await tryCatch(
db
.insert(settings)
.values(settingsV1)
.onConflictDoNothing()
.returning({ name: settings.name }),
);
if (error) {
log.error({ error }, "There was an error adding new settings");
}
if (data) {
log.info({ newSettingsAdded: data }, "New settings added");
}
};
// migrations after 230 go below here so we can keep this inline.

View File

@@ -1,11 +0,0 @@
export const swaggerOptions = {
definition: {
openapi: "3.0.0",
info: {
title: "Logistics Support Tool",
version: "1.0.0",
},
},
// globs where swagger-jsdoc should look for annotations:
apis: ["../../src/**/*.ts"],
};

View File

@@ -18,6 +18,7 @@ export const forecastData = pgTable("forecast_Data", {
quantity: real("quantity"),
requirementDate: timestamp("requirement_date").notNull(),
article: integer("article"),
description: text("description"),
createdAt: timestamp("created_at").defaultNow(),
});

View File

@@ -1,13 +1,26 @@
import { date, pgTable, text, uuid } from "drizzle-orm/pg-core";
import { createSelectSchema } from "drizzle-zod";
import { date, pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import z from "zod";
import { forkliftCompanies } from "./forkliftLeaseCompanys.js";
export const leases = pgTable("leases", {
id: uuid("id").defaultRandom().primaryKey(),
leaseNumber: text("lease_number").notNull(),
leaseNumber: text("lease_number").unique().notNull(),
companyId: uuid("company_id").references(() => forkliftCompanies.id),
startDate: date("start_date"),
endDate: date("end_date"),
leaseLink: text("lease_link"),
add_date: timestamp("add_date").defaultNow(),
add_user: text("add_user").default("LST"),
upd_date: timestamp("upd_date").defaultNow(),
upd_user: text("upd_user").default("LST"),
});
export const selectLeasesDataSchema = createSelectSchema(leases);
export const insertLeasesCompanySchema = createInsertSchema(leases).extend({
leaseNumber: z.string().min(3),
// zipcode: z
// .string()
// .regex(/^\d{5}$/)
// .optional(),
});

View File

@@ -1,9 +1,10 @@
import { numeric, pgTable, serial, uuid } from "drizzle-orm/pg-core";
import { numeric, pgTable, serial, text, uuid } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { forklifts } from "./forklifts.js";
import { leaseInvoices } from "./leaseInvoices.js";
export const leaseInvoiceForklifts = pgTable("lease_invoice_forklifts", {
id: serial("id").primaryKey(),
id: uuid("id").defaultRandom().primaryKey(),
invoiceId: uuid("invoice_id")
.notNull()
.references(() => leaseInvoices.id, { onDelete: "cascade" }),
@@ -12,3 +13,7 @@ export const leaseInvoiceForklifts = pgTable("lease_invoice_forklifts", {
.references(() => forklifts.forklift_id, { onDelete: "cascade" }),
amount: numeric("amount"), // optional: amount of invoice allocated to this lift
});
export const newForkliftInvoiceSchema = createInsertSchema(
leaseInvoiceForklifts,
);

View File

@@ -23,7 +23,7 @@ const status = pgEnum("forklift_status", [
export const forklifts = pgTable("forklifts", {
forklift_id: uuid("forklift_id").defaultRandom().primaryKey(),
forkliftNumber: serial("forklift_number").notNull(),
serialNumber: text("serial_number").notNull(),
serialNumber: text("serial_number").unique().notNull(),
model: text("model").notNull(),
plant: text("plant")
.notNull()
@@ -41,8 +41,8 @@ export const forklifts = pgTable("forklifts", {
dataPlate: text("data_plate"),
add_date: timestamp("add_date").defaultNow(),
add_user: text("add_user").default("LST"),
upd_date: timestamp("add_date").defaultNow(),
upd_user: text("add_user").default("LST"),
upd_date: timestamp("upd_date").defaultNow(),
upd_user: text("upd_user").default("LST"),
});
export const forkliftsSchema = createSelectSchema(forklifts);

View File

@@ -1,19 +1,25 @@
import { date, numeric, pgTable, text, uuid } from "drizzle-orm/pg-core";
import { forkliftCompanies } from "./forkliftLeaseCompanys.js";
import {
date,
numeric,
pgTable,
text,
timestamp,
uuid,
} from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { leases } from "./forkliftLeases.js";
import { forklifts } from "./forklifts.js";
export const leaseInvoices = pgTable("lease_invoices", {
id: uuid("id").defaultRandom().primaryKey(),
leaseId: uuid("lease_id")
.notNull()
.references(() => leases.id, { onDelete: "cascade" }),
companyId: uuid("company_id").references(() => forkliftCompanies.id),
invoiceNumber: text("invoice_number").notNull(),
invoiceNumber: text("invoice_number").unique().notNull(),
invoiceDate: date("invoice_date").notNull(),
forkliftId: uuid("forklift_id")
.notNull()
.references(() => forklifts.forklift_id, { onDelete: "cascade" }),
totalAmount: numeric("total_amount"),
comment: text("comment"),
add_date: timestamp("add_date"),
uploadedBy: text("uploaded_by"),
});
export const newInvoiceSchema = createInsertSchema(leaseInvoices);

View File

@@ -1,6 +1,12 @@
import { returnFunc } from "../utils/return.js";
import { connected, pool } from "./prodSqlConnect.js";
import { validateEnv } from "../utils/envValidator.js";
import { returnFunc } from "../utils/return.js";
import {
closePool,
connected,
pool,
reconnecting,
reconnectToSql,
} from "./prodSqlConnect.js";
const env = validateEnv(process.env);
/**
@@ -11,48 +17,65 @@ const env = validateEnv(process.env);
* You must use test1 always as it will be changed via query
*/
export async function prodQuery(queryToRun: string, name: string) {
if (!connected) {
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `The sql ${env.PROD_PLANT_TOKEN} is not connected`,
notify: false,
data: [],
});
}
const query = queryToRun.replaceAll("test1", env.PROD_PLANT_TOKEN);
try {
const result = await pool.request().query(query);
return {
success: true,
message: `Query results for: ${name}`,
data: result.recordset,
};
} catch (error: any) {
console.log(error);
if (error.code === "ETIMEOUT") {
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `${name} did not run due to a timeout.`,
notify: false,
data: [error],
});
}
if (!connected) {
reconnectToSql();
if (error.code === "EREQUEST") {
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `${name} encountered an error ${error.originalError.info.message}`,
data: [],
});
}
}
if (reconnecting) {
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `The sql ${env.PROD_PLANT_TOKEN} is trying to reconnect already`,
notify: false,
data: [],
});
} else {
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `The sql ${env.PROD_PLANT_TOKEN} is not connected`,
notify: false,
data: [],
});
}
}
const query = queryToRun.replaceAll("test1", env.PROD_PLANT_TOKEN);
try {
const result = await pool.request().query(query);
return {
success: true,
message: `Query results for: ${name}`,
data: result.recordset,
};
} catch (error: any) {
console.log(error);
if (error.code === "ETIMEOUT") {
closePool();
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `${name} did not run due to a timeout.`,
notify: false,
data: [error],
});
}
if (error.code === "EREQUEST") {
closePool();
return returnFunc({
success: false,
module: "prodSql",
subModule: "query",
level: "error",
message: `${name} encountered an error ${error.originalError.info.message}`,
data: [],
});
}
}
}

View File

@@ -1,136 +1,134 @@
import sql from "mssql";
import { checkHostnamePort } from "../utils/checkHostNamePort.js";
import { sqlConfig } from "./prodSqlConfig.js";
import { createLogger } from "../logger/logger.js";
import { returnFunc } from "../utils/return.js";
import { checkHostnamePort } from "../utils/checkHostNamePort.js";
import { validateEnv } from "../utils/envValidator.js";
import { returnFunc } from "../utils/return.js";
import { sqlConfig } from "./prodSqlConfig.js";
const env = validateEnv(process.env);
export let pool: any;
export let connected: boolean = false;
let reconnecting = false;
export let reconnecting = false;
export const initializeProdPool = async () => {
const log = createLogger({ module: "prodSql" });
const log = createLogger({ module: "prodSql" });
const serverUp = await checkHostnamePort(`${env.PROD_SERVER}:1433`);
const serverUp = await checkHostnamePort(`${env.PROD_SERVER}:1433`);
if (!serverUp) {
reconnectToSql();
return returnFunc({
success: false,
module: "prodSql",
level: "fatal",
message: `The sql ${env.PROD_SERVER} is not reachable`,
data: [],
});
}
if (!serverUp) {
reconnectToSql();
return returnFunc({
success: false,
module: "prodSql",
level: "fatal",
message: `The sql ${env.PROD_SERVER} is not reachable`,
data: [],
});
}
// if you were restarting from the endpoint you get this lovely error
if (connected) {
return returnFunc({
success: false,
module: "prodSql",
level: "error",
message: `There is already a connection to ${env.PROD_PLANT_TOKEN}`,
data: [],
});
}
try {
pool = await sql.connect(sqlConfig);
// if you were restarting from the endpoint you get this lovely error
if (connected) {
return returnFunc({
success: false,
module: "prodSql",
level: "error",
message: `There is already a connection to ${env.PROD_PLANT_TOKEN}`,
data: [],
});
}
try {
pool = await sql.connect(sqlConfig);
log.info(
`Connected to ${sqlConfig?.server}, using DB: ${sqlConfig?.database}`
);
connected = true;
} catch (error) {
log.fatal(
`${JSON.stringify(
error
)}, "There was an error connecting to the pool."`
);
reconnectToSql();
// throw new Error("There was an error closing the sql connection");
}
log.info(
`Connected to ${sqlConfig?.server}, using DB: ${sqlConfig?.database}`,
);
connected = true;
} catch (error) {
log.fatal(
`${JSON.stringify(error)}, "There was an error connecting to the pool."`,
);
reconnectToSql();
// throw new Error("There was an error closing the sql connection");
}
};
const reconnectToSql = async () => {
const log = createLogger({ module: "prodSql" });
if (reconnecting) return;
reconnecting = true;
export const reconnectToSql = async () => {
const log = createLogger({ module: "prodSql" });
if (reconnecting) return;
reconnecting = true;
let delay = 2000; // start at 2s
let attempts = 0;
const maxAttempts = 10; // or limit by time, e.g. 2 min total
let delay = 2000; // start at 2s
let attempts = 0;
const maxAttempts = 10; // or limit by time, e.g. 2 min total
while (!connected && attempts < maxAttempts) {
attempts++;
log.info(
`Reconnect attempt ${attempts}/${maxAttempts} in ${
delay / 1000
}s...`
);
while (!connected && attempts < maxAttempts) {
attempts++;
log.info(
`Reconnect attempt ${attempts}/${maxAttempts} in ${delay / 1000}s...`,
);
await new Promise((res) => setTimeout(res, delay));
await new Promise((res) => setTimeout(res, delay));
const serverUp = await checkHostnamePort(`${env.PROD_SERVER}:1433`);
const serverUp = await checkHostnamePort(`${env.PROD_SERVER}:1433`);
if (!serverUp) {
delay = Math.min(delay * 2, 30000); // exponential backoff up to 30s
continue;
}
if (!serverUp) {
delay = Math.min(delay * 2, 30000); // exponential backoff up to 30s
continue;
}
try {
pool = sql.connect(sqlConfig);
try {
pool = sql.connect(sqlConfig);
log.info(
`Connected to ${sqlConfig?.server}, and looking at ${sqlConfig?.database}`
);
reconnecting = false;
connected = true;
} catch (error) {
log.fatal(
`${JSON.stringify(
error
)}, "There was an error connecting to the pool."`
);
delay = Math.min(delay * 2, 30000); // exponential backoff up to 30s
// throw new Error("There was an error closing the sql connection");
}
}
log.info(
`Connected to ${sqlConfig?.server}, and looking at ${sqlConfig?.database}`,
);
reconnecting = false;
connected = true;
} catch (error) {
log.fatal(
`${JSON.stringify(
error,
)}, "There was an error connecting to the pool."`,
);
delay = Math.min(delay * 2, 30000); // exponential backoff up to 30s
// throw new Error("There was an error closing the sql connection");
}
}
if (!connected) {
log.fatal(
{ notify: true },
"Max reconnect attempts reached on the prodSql server. Stopping retries."
);
reconnecting = false;
// optional: exit process or alert someone here
// process.exit(1);
}
if (!connected) {
log.fatal(
{ notify: true },
"Max reconnect attempts reached on the prodSql server. Stopping retries.",
);
reconnecting = false;
// exit process or alert someone here
// process.exit(1);
}
};
export const closePool = async () => {
const log = createLogger({ module: "prodSql" });
if (!connected) {
log.error("There is no connection a connection.");
return { success: false, message: "There is already a connection." };
}
try {
await pool.close();
log.info("Connection pool closed");
connected = false;
return {
success: true,
message: "The sql server connection has been closed",
};
} catch (error) {
log.fatal(
{ notify: true },
`${JSON.stringify(
error
)}, "There was an error closing the sql connection"`
);
}
const log = createLogger({ module: "prodSql" });
if (!connected) {
log.error("There is no connection a connection.");
return { success: false, message: "There is already a connection." };
}
try {
await pool.close();
log.info("Connection pool closed");
connected = false;
return {
success: true,
message: "The sql server connection has been closed",
};
} catch (error) {
connected = false;
log.info(
//{ notify: true },
{ error: error },
`${JSON.stringify(
error,
)}, "There was an error closing the sql connection"`,
);
}
};

View File

@@ -1,12 +1,12 @@
export const activeArticle = `
use AlplaPROD_test1
SELECT V_Artikel.IdArtikelvarianten,
V_Artikel.Bezeichnung,
V_Artikel.ArtikelvariantenTypBez,
V_Artikel.PreisEinheitBez,
SELECT V_Artikel.IdArtikelvarianten as article,
V_Artikel.Bezeichnung as description,
V_Artikel.ArtikelvariantenTypBez as articleType,
V_Artikel.PreisEinheitBez as pricePoint,
case when sales.price is null then 0 else sales.price end as salesPrice,
TypeOfMaterial=CASE
CASE
WHEN
V_Artikel.ArtikelvariantenTypBez LIKE'%Additive'
Then 'AD'
@@ -90,14 +90,15 @@ THEN 'Caps'
When
V_Artikel.ArtikelvariantenTypBez = 'Dummy'
THEN 'Not used'
ELSE 'Item not defined' END
,V_Artikel.IdArtikelvariantenTyp,
Round(V_Artikel.ArtikelGewicht, 3) as Article_Weight,
IdAdresse,
AdressBez,
AdressTypBez,
ProdBereichBez,
FG=case when
ELSE 'Item not defined' END as typeOfMaterial
,V_Artikel.IdArtikelvariantenTyp as articleIdType,
Round(V_Artikel.ArtikelGewicht, 3) as articleWeight,
IdAdresse as idAddress,
AdressBez as addressDescription,
AdressTypBez as addressType,
ProdBereichBez as profitCenter,
case when
V_Artikel.ProdBereichBez = 'SBM' or
V_Artikel.ProdBereichBez = 'IM-Caps' or
V_Artikel.ProdBereichBez = 'IM-PET' or
@@ -107,15 +108,16 @@ V_Artikel.ProdBereichBez = 'ISBM' or
V_Artikel.ProdBereichBez = 'IM-Finishing'
Then 'FG'
Else 'not Defined Profit Center'
end,
end as fg,
V_Artikel.Umlaeufe as num_of_cycles,
V_FibuKonten_BASIS.FibuKontoNr as CostsCenterId,
V_FibuKonten_BASIS.Bezeichnung as CostCenterDescription,
sales.[KdArtNr] as CustomerArticleNumber,
sales.[KdArtBez] as CustomerArticleDescription,
round(V_Artikel.Zyklus, 2) as CycleTime,
V_FibuKonten_BASIS.FibuKontoNr as costsCenterId,
V_FibuKonten_BASIS.Bezeichnung as costCenterDescription,
sales.[KdArtNr] as customerArticleNumber,
sales.[KdArtBez] as customerArticleDescription,
round(V_Artikel.Zyklus, 2) as cycleTime,
Sypronummer as salesAgreement,
V_Artikel.ProdArtikelBez as ProductFamily
V_Artikel.ProdArtikelBez as productFamily
--,REPLACE(pur.UOM,'UOM:','')
,Case when LEFT(
LTRIM(REPLACE(pur.UOM,'UOM:','')),
@@ -123,7 +125,7 @@ V_Artikel.ProdArtikelBez as ProductFamily
) is null then '1' else LEFT(
LTRIM(REPLACE(pur.UOM,'UOM:','')),
CHARINDEX(' ', LTRIM(REPLACE(REPLACE(pur.UOM,'UOM:',''), CHAR(13)+CHAR(10), ' ')) + ' ') - 1
) end AS UOM
) end AS uom
--,*
FROM dbo.V_Artikel (nolock)

View File

@@ -1,123 +1,125 @@
import type { Address } from "nodemailer/lib/mailer/index.js";
import type { Transporter } from "nodemailer";
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
import type Mail from "nodemailer/lib/mailer/index.js";
import os from "os";
import nodemailer from "nodemailer";
import type Mail from "nodemailer/lib/mailer/index.js";
import type { Address } from "nodemailer/lib/mailer/index.js";
import type SMTPTransport from "nodemailer/lib/smtp-transport/index.js";
import hbs from "nodemailer-express-handlebars";
import os from "os";
import path from "path";
import { fileURLToPath } from "url";
import { promisify } from "util";
import hbs from "nodemailer-express-handlebars";
import { createLogger } from "../../logger/logger.js";
interface HandlebarsMailOptions extends Mail.Options {
template: string;
context: Record<string, unknown>;
template: string;
context: Record<string, unknown>;
}
interface EmailData {
email: string;
subject: string;
template: string;
context: Record<string, unknown>;
email: string;
subject: string;
template: string;
context: Record<string, unknown>;
}
export const sendEmail = async (data: EmailData): Promise<any> => {
const log = createLogger({ module: "pkg", subModule: "sendMail" });
let transporter: Transporter;
let fromEmail: string | Address;
const log = createLogger({ module: "pkg", subModule: "sendMail" });
let transporter: Transporter;
let fromEmail: string | Address;
if (
os.hostname().includes("OLP") &&
process.env.EMAIL_USER &&
process.env.EMAIL_PASSWORD
) {
transporter = nodemailer.createTransport({
service: "gmail",
auth: {
user: process.env.EMAIL_USER,
pass: process.env.EMAIL_PASSWORD,
},
//debug: true,
});
// if (
// os.hostname().includes("OLP") &&
// process.env.EMAIL_USER &&
// process.env.EMAIL_PASSWORD
// ) {
// transporter = nodemailer.createTransport({
// service: "gmail",
// auth: {
// user: process.env.EMAIL_USER,
// pass: process.env.EMAIL_PASSWORD,
// },
// //debug: true,
// });
// update the from email
fromEmail = process.env.EMAIL_USER;
} else {
// convert to the correct plant token.
// // update the from email
// fromEmail = process.env.EMAIL_USER;
// } else {
// // convert to the correct plant token.
let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`;
//let host = `${os.hostname().replace("VMS006", "")}-smtp.alpla.net`;
//const testServers = ["vms036", "VMS036"];
//const testServers = ["vms036", "VMS036"];
if (os.hostname().includes("VMS036")) {
host = "USMCD1-smtp.alpla.net";
}
// if (os.hostname().includes("VMS036")) {
// host = "USMCD1-smtp.alpla.net";
// }
// if (plantToken[0].value === "usiow2") {
// host = "USIOW1-smtp.alpla.net";
// }
// if (plantToken[0].value === "usiow2") {
// host = "USIOW1-smtp.alpla.net";
// }
transporter = nodemailer.createTransport({
host: host,
port: 25,
rejectUnauthorized: false,
//secure: false,
// auth: {
// user: "alplaprod",
// pass: "obelix",
// },
debug: true,
} as SMTPTransport.Options);
transporter = nodemailer.createTransport({
host: "smtp.azurecomm.net",
port: 587,
//rejectUnauthorized: false,
tls: {
minVersion: "TLSv1.2",
},
auth: {
user: "donotreply@mail.alpla.com",
pass: process.env.SMTP_PASSWORD,
},
debug: true,
} as SMTPTransport.Options);
// update the from email
fromEmail = `noreply@alpla.com`;
}
// update the from email
fromEmail = `DoNotReply@mail.alpla.com`;
//}
// creating the handlbar options
const viewPath = path.resolve(
path.dirname(fileURLToPath(import.meta.url)),
"./views/"
);
// creating the handlbar options
const viewPath = path.resolve(
path.dirname(fileURLToPath(import.meta.url)),
"./views/",
);
const handlebarOptions = {
viewEngine: {
extname: ".hbs",
//layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory
defaultLayout: "", // Specify the default layout
partialsDir: viewPath,
},
viewPath: viewPath,
extName: ".hbs", // File extension for Handlebars templates
};
const handlebarOptions = {
viewEngine: {
extname: ".hbs",
//layoutsDir: path.resolve(viewPath, "layouts"), // Path to layouts directory
defaultLayout: "", // Specify the default layout
partialsDir: viewPath,
},
viewPath: viewPath,
extName: ".hbs", // File extension for Handlebars templates
};
transporter.use("compile", hbs(handlebarOptions));
transporter.use("compile", hbs(handlebarOptions));
const mailOptions: HandlebarsMailOptions = {
from: fromEmail,
to: data.email,
subject: data.subject,
//text: "You will have a reset token here and only have 30min to click the link before it expires.",
//html: emailTemplate("BlakesTest", "This is an example with css"),
template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs')
context: data.context,
};
const mailOptions: HandlebarsMailOptions = {
from: fromEmail,
to: data.email,
subject: data.subject,
//text: "You will have a reset token here and only have 30min to click the link before it expires.",
//html: emailTemplate("BlakesTest", "This is an example with css"),
template: data.template, // Name of the Handlebars template (e.g., 'welcome.hbs')
context: data.context,
};
// now verify and send the email
const sendMailPromise = promisify(transporter.sendMail).bind(transporter);
// now verify and send the email
const sendMailPromise = promisify(transporter.sendMail).bind(transporter);
try {
// Send email and await the result
const info = await sendMailPromise(mailOptions);
log.info(null, `Email was sent to: ${data.email}`);
return { success: true, message: "Email sent.", data: info };
} catch (err) {
console.log(err);
log.error(
{ error: err },
try {
// Send email and await the result
const info = await sendMailPromise(mailOptions);
log.info(null, `Email was sent to: ${data.email}`);
return { success: true, message: "Email sent.", data: info };
} catch (err) {
console.log(err);
log.error(
{ error: err },
`Error sending Email to : ${data.email}`
);
return { success: false, message: "Error sending email.", error: err };
}
`Error sending Email to : ${data.email}`,
);
return { success: false, message: "Error sending email.", error: err };
}
};

View File

@@ -76,7 +76,9 @@ export const prodEndpoint = async <T>(
return {
success: false,
message: "There was an error processing the endpoint",
data: apiError.response.data,
data: apiError.response
? apiError.response.data
: [{ error: "There was an internal error." }],
};
}

View File

@@ -316,7 +316,7 @@ func runNPMInstall(rootDir string, folder string) error {
} else {
folderDir = filepath.Join(rootDir, folder)
}
cmd := exec.Command("npm", "install")
cmd := exec.Command("npm", "install", "--production")
cmd.Dir = folderDir
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr

File diff suppressed because it is too large Load Diff

View File

@@ -11,70 +11,70 @@
},
"dependencies": {
"@dnd-kit/core": "^6.3.1",
"@radix-ui/react-avatar": "^1.1.10",
"@radix-ui/react-avatar": "^1.1.11",
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-collapsible": "^1.1.12",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.16",
"@radix-ui/react-label": "^2.1.7",
"@radix-ui/react-label": "^2.1.8",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-scroll-area": "^1.2.10",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slot": "^1.2.3",
"@radix-ui/react-separator": "^1.1.8",
"@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-switch": "^1.2.6",
"@radix-ui/react-tabs": "^1.1.13",
"@radix-ui/react-tooltip": "^1.2.8",
"@react-pdf/renderer": "^4.3.1",
"@tailwindcss/vite": "^4.1.13",
"@tanstack/react-form": "^1.23.0",
"@tanstack/react-query": "^5.89.0",
"@tanstack/react-query-devtools": "^5.90.2",
"@tanstack/react-router": "^1.131.36",
"@tanstack/react-router-devtools": "^1.131.36",
"@tailwindcss/vite": "^4.1.17",
"@tanstack/react-form": "^1.26.0",
"@tanstack/react-query": "^5.90.11",
"@tanstack/react-query-devtools": "^5.91.1",
"@tanstack/react-router": "^1.139.6",
"@tanstack/react-router-devtools": "^1.139.6",
"@tanstack/react-table": "^8.21.3",
"@types/react-calendar-timeline": "^0.28.6",
"axios": "^1.12.2",
"better-auth": "^1.3.11",
"axios": "^1.13.2",
"better-auth": "^1.4.2",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
"date-fns": "^4.1.0",
"is-mobile": "^5.0.0",
"js-cookie": "^3.0.5",
"jsbarcode": "^3.12.1",
"lucide-react": "^0.542.0",
"marked": "^16.4.1",
"lucide-react": "^0.554.0",
"marked": "^17.0.1",
"moment": "^2.30.1",
"r": "^0.0.5",
"react": "^19.1.1",
"react": "^19.2.0",
"react-barcode": "^1.6.1",
"react-calendar-timeline": "^0.30.0-beta.3",
"react-day-picker": "^9.11.1",
"react-dom": "^19.1.1",
"react-hook-form": "^7.65.0",
"react-calendar-timeline": "^0.30.0-beta.4",
"react-day-picker": "^9.11.2",
"react-dom": "^19.2.0",
"react-hook-form": "^7.66.1",
"react-resizable-panels": "^3.0.6",
"recharts": "^2.15.4",
"socket.io-client": "^4.8.1",
"sonner": "^2.0.7",
"tailwind-merge": "^3.3.1",
"tailwindcss": "^4.1.13",
"tailwind-merge": "^3.4.0",
"tailwindcss": "^4.1.17",
"zustand": "^5.0.8"
},
"devDependencies": {
"@eslint/js": "^9.33.0",
"@tanstack/router-plugin": "^1.131.36",
"@eslint/js": "^9.39.1",
"@tanstack/router-plugin": "^1.139.6",
"@types/js-cookie": "^3.0.6",
"@types/node": "^24.3.1",
"@types/react": "^19.1.10",
"@types/react-dom": "^19.1.7",
"@vitejs/plugin-react-swc": "^4.0.0",
"eslint": "^9.33.0",
"eslint-plugin-react-hooks": "^5.2.0",
"eslint-plugin-react-refresh": "^0.4.20",
"globals": "^16.3.0",
"tw-animate-css": "^1.3.8",
"typescript": "~5.8.3",
"typescript-eslint": "^8.39.1",
"vite": "^7.1.2"
"@types/node": "^24.10.1",
"@types/react": "^19.2.7",
"@types/react-dom": "^19.2.3",
"@vitejs/plugin-react-swc": "^4.2.2",
"eslint": "^9.39.1",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.24",
"globals": "^16.5.0",
"tw-animate-css": "^1.4.0",
"typescript": "~5.9.3",
"typescript-eslint": "^8.48.0",
"vite": "^7.2.4"
}
}

Some files were not shown because too many files have changed in this diff Show More