Compare commits: 88 commits, 74677d12c4...v0.0.1-alp
| SHA1 |
|---|
| d05a0ce930 |
| 995b1dda7c |
| 97f93a1830 |
| 635635b356 |
| a691dc276e |
| 8dfcbc5720 |
| 103ae77e9f |
| beeccc6e8d |
| 0880298cf5 |
| 34b0abac36 |
| 28c226ddbc |
| 42861cc69e |
| 5f3d683a13 |
| a17787e852 |
| 5865ac3b99 |
| 637de857f9 |
| 3ecf5fb916 |
| 92ba3ef512 |
| 7d6c2db89c |
| 74262beb65 |
| f3b8dd94e5 |
| 0059b9b850 |
| 1ad789b2b9 |
| 079478f932 |
| d6d5b451cd |
| 76747cf917 |
| 6e85991062 |
| 98e408cb85 |
| ed052dff3c |
| 8f59bba614 |
| fb2c5609aa |
| 17aed6cb89 |
| b02b93b83f |
| 9ceba8b5bb |
| 2c0dbf95c7 |
| 860207a60b |
| 5c6460012a |
| be1d4081e0 |
| 83a94cacf3 |
| 0ce3790675 |
| 5854889eb5 |
| 4caaf74569 |
| fe889ca757 |
| 699c124b0e |
| 7d55c5f431 |
| c4fd74fc93 |
| 3775760734 |
| 643d12ff18 |
| 82eaa23da7 |
| b18d1ced6d |
| 69c5cf87fd |
| 1fadf0ad25 |
| beae6eb648 |
| 82ab735982 |
| dbd56c1b50 |
| 037a473ab7 |
| 32998d417f |
| ddcb7e76a3 |
| 191cb2b698 |
| 2021141967 |
| 751c8f21ab |
| 85073c19d2 |
| 6b8d7b53d0 |
| e025d0f5cc |
| e67e9e6d72 |
| 2846b9cb0d |
| 5db2a7fe75 |
| 81dc575b4f |
| bf7d765989 |
| 4f24fe4660 |
| 68d13b03d3 |
| c3379919b9 |
| 326c2e125c |
| 880902c478 |
| 100c9ff9be |
| a8af021621 |
| 5469a0dc5c |
| 2d1f613d39 |
| 597d990a69 |
| 76503f558b |
| 23c000fa7f |
| 31f8c368d9 |
| 81bd4d6dcb |
| 152f7042c9 |
| ba4635a7a7 |
| 9ca24a266a |
| e7a0a3ff21 |
| f40a4acad1 |
@@ -1,8 +0,0 @@
# Changesets

Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
with multi-package repos, or single-package repos to help you version and publish your code. You can
find the full documentation for it [in our repository](https://github.com/changesets/changesets)

We have a quick list of common questions to get you started engaging with this project in
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
@@ -1,11 +0,0 @@
{
  "$schema": "https://unpkg.com/@changesets/config@3.1.2/schema.json",
  "changelog": "@changesets/cli/changelog",
  "commit": false,
  "fixed": [],
  "linked": [],
  "access": "restricted",
  "baseBranch": "main",
  "updateInternalDependencies": "patch",
  "ignore": []
}
@@ -4,4 +4,9 @@ node_modules
dist
Dockerfile
docker-compose.yml
npm-debug.log
npm-debug.log
builds
testFiles
nssm.exe
postgresql-17.9-2-windows-x64.exe
VSCodeUserSetup-x64-1.112.0.msi
.gitea/workflows/docker-build.yml (new file, 31 lines)
@@ -0,0 +1,31 @@
name: Build and Push LST Docker Image

on:
  push:
    branches:
      - main

jobs:
  docker:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout (local)
        run: |
          git clone https://git.tuffraid.net/cowch/lst_v3.git .
          git checkout ${{ gitea.sha }}

      - name: Login to registry
        run: echo "${{ secrets.PASSWORD }}" | docker login git.tuffraid.net -u "cowch" --password-stdin

      - name: Build image
        run: |
          docker build \
            -t git.tuffraid.net/cowch/lst_v3:latest \
            -t git.tuffraid.net/cowch/lst_v3:${{ gitea.sha }} \
            .

      - name: Push
        run: |
          docker push git.tuffraid.net/cowch/lst_v3:latest
          docker push git.tuffraid.net/cowch/lst_v3:${{ gitea.sha }}
.gitea/workflows/release.yml (new file, 229 lines)
@@ -0,0 +1,229 @@
name: Release and Build Image

on:
  push:
    tags:
      - "v*"

jobs:
  release:
    runs-on: ubuntu-latest

    env:
      # Internal/origin Gitea URL. Do NOT use the Cloudflare fronted URL here.
      # Examples:
      #   http://gitea.internal.lan:3000
      #   https://gitea-origin.yourdomain.local
      GITEA_INTERNAL_URL: "https://git.tuffraid.net"

      # Internal/origin registry host. Usually same host as above, but without protocol.
      # Example:
      #   gitea.internal:3000
      REGISTRY_HOST: "git.tuffraid.net"

    steps:
      - name: Check out repository
        uses: actions/checkout@v4

      - name: Prepare release metadata
        shell: bash
        run: |
          set -euo pipefail

          TAG="${GITHUB_REF_NAME:-${GITHUB_REF##refs/tags/}}"
          VERSION="${TAG#v}"
          IMAGE_NAME="${REGISTRY_HOST}/${{ gitea.repository }}"

          echo "TAG=$TAG" >> "$GITHUB_ENV"
          echo "VERSION=$VERSION" >> "$GITHUB_ENV"
          echo "IMAGE_NAME=$IMAGE_NAME" >> "$GITHUB_ENV"

          if [[ "$TAG" == *-* ]]; then
            echo "PRERELEASE=true" >> "$GITHUB_ENV"
          else
            echo "PRERELEASE=false" >> "$GITHUB_ENV"
          fi

          echo "Resolved TAG=$TAG"
          echo "Resolved VERSION=$VERSION"
          echo "Resolved IMAGE_NAME=$IMAGE_NAME"

      - name: Log in to Gitea container registry
        shell: bash
        env:
          REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
          REGISTRY_TOKEN: ${{ secrets.RELEASE_TOKEN }}
        run: |
          set -euo pipefail
          echo "$REGISTRY_TOKEN" | docker login "$REGISTRY_HOST" -u "$REGISTRY_USERNAME" --password-stdin

      - name: Build Docker image
        shell: bash
        run: |
          set -euo pipefail
          docker build \
            -t "$IMAGE_NAME:$TAG" \
            -t "$IMAGE_NAME:latest" \
            .

      - name: Push version tag
        shell: bash
        run: |
          set -euo pipefail
          docker push "$IMAGE_NAME:$TAG"

      - name: Push latest tag
        if: ${{ !contains(env.TAG, '-') }}
        shell: bash
        run: |
          set -euo pipefail
          docker push "$IMAGE_NAME:latest"

      - name: Push prerelease channel tag
        if: ${{ contains(env.TAG, '-') }}
        shell: bash
        env:
          TAG: ${{ env.TAG }}
        run: |
          set -euo pipefail
          CHANNEL="${TAG#*-}"
          CHANNEL="${CHANNEL%%.*}"

          echo "Resolved prerelease channel: $CHANNEL"

          docker tag "$IMAGE_NAME:$TAG" "$IMAGE_NAME:$CHANNEL"
          docker push "$IMAGE_NAME:$CHANNEL"

      - name: Extract matching CHANGELOG section
        shell: bash
        env:
          VERSION: ${{ env.VERSION }}
        run: |
          set -euo pipefail
          python3 - <<'PY'
          import os
          import re
          from pathlib import Path

          version = os.environ["VERSION"]
          changelog_path = Path("CHANGELOG.md")

          if not changelog_path.exists():
              Path("release_body.md").write_text(f"Release {version}\n", encoding="utf-8")
              raise SystemExit(0)

          text = changelog_path.read_text(encoding="utf-8")

          # Matches headings like:
          #   ## [0.1.0]
          #   ## 0.1.0
          #   ## [0.1.0-alpha.1]
          pattern = re.compile(
              rf"^##\s+\[?{re.escape(version)}\]?[^\n]*\n(.*?)(?=^##\s+\[?[^\n]+|\Z)",
              re.MULTILINE | re.DOTALL,
          )

          match = pattern.search(text)
          if match:
              body = match.group(1).strip()
          else:
              body = f"Release {version}"

          if not body:
              body = f"Release {version}"

          Path("release_body.md").write_text(body + "\n", encoding="utf-8")
          print("----- release_body.md -----")
          print(body)
          print("---------------------------")
          PY

      - name: Create Gitea release
        shell: bash
        env:
          RELEASE_TOKEN: ${{ secrets.RELEASE_TOKEN }}
          GITEA_REPOSITORY: ${{ gitea.repository }}
          GITEA_INTERNAL_URL: ${{ env.GITEA_INTERNAL_URL }}
          TAG: ${{ env.TAG }}
          PRERELEASE: ${{ env.PRERELEASE }}
        run: |
          set -euo pipefail
          python3 - <<'PY'
          import json
          import os
          import urllib.request
          import urllib.error
          from pathlib import Path

          tag = os.environ["TAG"]
          prerelease = os.environ["PRERELEASE"].lower() == "true"
          server_url = os.environ["GITEA_INTERNAL_URL"].rstrip("/")
          repo = os.environ["GITEA_REPOSITORY"]
          token = os.environ["RELEASE_TOKEN"]

          body = Path("release_body.md").read_text(encoding="utf-8").strip()

          # Check if the release already exists for this tag
          get_url = f"{server_url}/api/v1/repos/{repo}/releases/tags/{tag}"
          get_req = urllib.request.Request(
              get_url,
              method="GET",
              headers={
                  "Authorization": f"token {token}",
                  "Accept": "application/json",
                  "User-Agent": "lst-release-workflow/1.0",
              },
          )

          existing_release = None
          try:
              with urllib.request.urlopen(get_req) as resp:
                  existing_release = json.loads(resp.read().decode("utf-8"))
          except urllib.error.HTTPError as e:
              if e.code != 404:
                  details = e.read().decode("utf-8", errors="replace")
                  print("Failed checking existing release:")
                  print(details)
                  raise

          payload = {
              "tag_name": tag,
              "name": tag,
              "body": body,
              "draft": False,
              "prerelease": prerelease,
          }

          data = json.dumps(payload).encode("utf-8")

          if existing_release:
              release_id = existing_release["id"]
              url = f"{server_url}/api/v1/repos/{repo}/releases/{release_id}"
              method = "PATCH"
              print(f"Release already exists for tag {tag}, updating release id {release_id}")
          else:
              url = f"{server_url}/api/v1/repos/{repo}/releases"
              method = "POST"
              print(f"No release exists for tag {tag}, creating a new one")

          req = urllib.request.Request(
              url,
              data=data,
              method=method,
              headers={
                  "Authorization": f"token {token}",
                  "Content-Type": "application/json",
                  "Accept": "application/json",
                  "User-Agent": "lst-release-workflow/1.0",
              },
          )

          try:
              with urllib.request.urlopen(req) as resp:
                  print(resp.read().decode("utf-8"))
          except urllib.error.HTTPError as e:
              details = e.read().decode("utf-8", errors="replace")
              print("Release create/update failed:")
              print(details)
              raise
          PY
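The prerelease step above derives a channel tag from the git tag with two shell parameter expansions. A minimal TypeScript sketch of the same derivation (the function name is illustrative, not part of the repo):

```ts
// Mirrors CHANNEL="${TAG#*-}" then CHANNEL="${CHANNEL%%.*}" from release.yml.
function prereleaseChannel(tag: string): string | null {
  const dash = tag.indexOf("-");
  if (dash === -1) return null; // a stable tag like "v1.0.0" has no channel
  const suffix = tag.slice(dash + 1); // "v0.0.1-alpha.3" -> "alpha.3"
  return suffix.split(".")[0] ?? suffix; // "alpha.3" -> "alpha"
}

console.log(prereleaseChannel("v0.0.1-alpha.3")); // "alpha"
console.log(prereleaseChannel("v1.0.0")); // null
```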
.gitignore (vendored, 11 changed lines)
@@ -1,5 +1,16 @@
# ---> Node
testFiles
builds
.includes
.buildNumber
temp
brunoApi
.scriptCreds
node-v24.14.0-x64.msi
postgresql-17.9-2-windows-x64.exe
VSCodeUserSetup-x64-1.112.0.exe
nssm.exe

# Logs
logs
*.log
@@ -11,7 +11,7 @@
    { "type": "ci", "hidden": false, "section": "📈 Project changes" },
    { "type": "build", "hidden": false, "section": "📈 Project Builds" }
  ],
  "commitUrlFormat": "https://git.tuffraid.net/cowch/lst/commits/{{hash}}",
  "compareUrlFormat": "https://git.tuffraid.net/cowch/lst/compare/{{previousTag}}...{{currentTag}}",
  "commitUrlFormat": "https://git.tuffraid.net/cowch/lst_v3/commits/{{hash}}",
  "compareUrlFormat": "https://git.tuffraid.net/cowch/lst_v3/compare/{{previousTag}}...{{currentTag}}",
  "header": "# All Changes to LST can be found below.\n"
}
.vscode/lst.code-snippets (vendored, 1 changed line)
@@ -10,6 +10,7 @@
      "\tmessage: \"${5:Failed to connect to the prod sql server.}\",",
      "\tdata: ${6:[]},",
      "\tnotify: ${7:false},",
      "\troom: ${8:''},",
      "});"
    ],
    "description": "Insert a returnFunc template"
.vscode/settings.json (vendored, 15 changed lines)
@@ -3,6 +3,8 @@
  "workbench.colorTheme": "Default Dark+",
  "terminal.integrated.env.windows": {},
  "editor.formatOnSave": true,
  "typescript.preferences.importModuleSpecifier": "relative",
  "javascript.preferences.importModuleSpecifier": "relative",
  "editor.codeActionsOnSave": {
    "source.fixAll.biome": "explicit",
    "source.organizeImports.biome": "explicit"
@@ -52,12 +54,23 @@
    "alpla",
    "alplamart",
    "alplaprod",
    "alplapurchase",
    "bookin",
    "Datamart",
    "dotenvx",
    "dyco",
    "intiallally",
    "manadatory",
    "OCME",
    "onnotice",
    "opendock",
    "opendocks",
    "ppoo",
    "prodlabels"
    "preseed",
    "prodlabels",
    "prolink",
    "Skelly",
    "trycatch"
  ],
  "gitea.token": "8456def90e1c651a761a8711763d6ef225d6b2db",
  "gitea.instanceURL": "https://git.tuffraid.net",
CHANGELOG.md (91 changed lines)
@@ -1,7 +1,90 @@
# lst_v3
# All Changes to LST can be found below.

## 1.0.1
## [0.0.1-alpha.3](https://git.tuffraid.net/cowch/lst_v3/compare/v0.0.1-alpha.2...v0.0.1-alpha.3) (2026-04-10)

### Patch Changes

- cf18e94: core stuff

### 🌟 Enhancements

* **puchase hist:** finished up purhcase historical / gp updates ([a691dc2](https://git.tuffraid.net/cowch/lst_v3/commits/a691dc276e8650c669409241f73d7b2d7a1f9176))

### 🛠️ Code Refactor

* **gp connect:** gp connect as was added to long live services ([635635b](https://git.tuffraid.net/cowch/lst_v3/commits/635635b356e1262e1c0b063408fe2209e6a8d4ec))
* **reprints:** changes the module and submodule around to be more accurate ([97f93a1](https://git.tuffraid.net/cowch/lst_v3/commits/97f93a1830761437118863372108df810ce9977a))
* **send email:** changes the error message to show the true message in the error ([995b1dd](https://git.tuffraid.net/cowch/lst_v3/commits/995b1dda7cdfebf4367d301ccac38fd339fab6dd))

## [0.0.1-alpha.2](https://git.tuffraid.net/cowch/lst_v3/compare/v0.0.1-alpha.1...v0.0.1-alpha.2) (2026-04-08)

### 📈 Project Builds

* **release:** docker and release corrections ([103ae77](https://git.tuffraid.net/cowch/lst_v3/commits/103ae77e9f82fc008a8ae143b6feccc3ce802f8c))

## [0.0.1-alpha.1](https://git.tuffraid.net/cowch/lst_v3/compare/v0.0.1-alpha.0...v0.0.1-alpha.1) (2026-04-08)

* **notifcaion:** style changes to the notificaion card and started the table ([7d6c2db](https://git.tuffraid.net/cowch/lst_v3/commits/7d6c2db89cae1f137f126f5814dccd373f7ccb76))

### 🌟 Enhancements

* **notification:** base notifcaiton sub and admin compelted ([5865ac3](https://git.tuffraid.net/cowch/lst_v3/commits/5865ac3b99d60005c4245740369b0e0789c8fbbd))
* **notification:** reprint added ([a17787e](https://git.tuffraid.net/cowch/lst_v3/commits/a17787e85217f1fa4a5e5389e29c33ec09c286c5))
* **puchase history:** purhcase history changed to long running no notification ([34b0aba](https://git.tuffraid.net/cowch/lst_v3/commits/34b0abac36f645d0fe5f508881ddbef81ff04b7c))
* **purchase:** historical data capture for alpla purchase ([42861cc](https://git.tuffraid.net/cowch/lst_v3/commits/42861cc69e8d4aba5a9670aaed55417efda2b505))
* **user notifications:** added the ability for users to sub to notifications and add multi email ([637de85](https://git.tuffraid.net/cowch/lst_v3/commits/637de857f99499a41f7175181523f5d809d95d7e))

### 🐛 Bug fixes

* **build:** issue with how i wrote the release token ([fe889ca](https://git.tuffraid.net/cowch/lst_v3/commits/fe889ca75731af08c42ec714b7f2abf17cd1ee40))
* **build:** type in how we pushed the header over ([83a94ca](https://git.tuffraid.net/cowch/lst_v3/commits/83a94cacf3fc87287cdc0c0cc861b339e72e4b83))
* **build:** typo ([860207a](https://git.tuffraid.net/cowch/lst_v3/commits/860207a60b6e04b15736cba631be6c7eab74d020))
* **i suck:** more learning experance ([9ceba8b](https://git.tuffraid.net/cowch/lst_v3/commits/9ceba8b5bba17959f27b16b28f50a83c044863fb))
* **lala:** something here ([17aed6c](https://git.tuffraid.net/cowch/lst_v3/commits/17aed6cb89f8220570f6c66f78dba6bb202c1aaa))
* **release:** typo that caused errors ([76747cf](https://git.tuffraid.net/cowch/lst_v3/commits/76747cf91738bd0d0530afcf7b4f51f0db11ca98))
* **typo:** more dam typos ([079478f](https://git.tuffraid.net/cowch/lst_v3/commits/079478f93217dea31c9a1e8ffed85d2381a6977d))
* **wrelease:** forgot to save ([3775760](https://git.tuffraid.net/cowch/lst_v3/commits/377576073449e95d315defb913dc317759cc3f43))

### 📝 Chore

* **release:** 0.1.0-alpha.10 ([98e408c](https://git.tuffraid.net/cowch/lst_v3/commits/98e408cb8577da18e24821b55474198439434f3e))
* **release:** 0.1.0-alpha.11 ([d6d5b45](https://git.tuffraid.net/cowch/lst_v3/commits/d6d5b451cd9aeba642ef94654ca20f4acd0b827c))
* **release:** 0.1.0-alpha.12 ([1ad789b](https://git.tuffraid.net/cowch/lst_v3/commits/1ad789b2b91a20a2f5a8dc9e6f39af2e19ec9cdc))
* **release:** 0.1.0-alpha.9 ([8f59bba](https://git.tuffraid.net/cowch/lst_v3/commits/8f59bba614a8eaa3105bb56f0db36013d5e68485))
* **release:** version packages ([fb2c560](https://git.tuffraid.net/cowch/lst_v3/commits/fb2c5609aa12ea7823783c364d5bd029c48a64bd))
* **release:** version packages ([b02b93b](https://git.tuffraid.net/cowch/lst_v3/commits/b02b93b83f488fbcee6d24db080ad0d1fe1c5f59))
* **release:** version packages ([2c0dbf9](https://git.tuffraid.net/cowch/lst_v3/commits/2c0dbf95c7b8dfd2c98b476d3f44bc8929668c88))
* **release:** version packages ([5c64600](https://git.tuffraid.net/cowch/lst_v3/commits/5c6460012aa70d336fbc9702240b4f19262a6b41))
* **release:** version packages ([0ce3790](https://git.tuffraid.net/cowch/lst_v3/commits/0ce3790675bc408762eafe76cbd5ab496fd06e73))
* **release:** version packages ([4caaf74](https://git.tuffraid.net/cowch/lst_v3/commits/4caaf745693d4df847aefd3721ac5d0ae792114a))
* **release:** version packages ([699c124](https://git.tuffraid.net/cowch/lst_v3/commits/699c124b0efba8282e436210619504bda8878e90))
* **release:** version packages ([c4fd74f](https://git.tuffraid.net/cowch/lst_v3/commits/c4fd74fc93226cffd9e39602f507a05cd8ea628b))

### 📚 Documentation

* **readme:** updated progress data ([92ba3ef](https://git.tuffraid.net/cowch/lst_v3/commits/92ba3ef5121afd0d82d4f40a5a985e1fdc081011))
* **sop:** added more info ([be1d408](https://git.tuffraid.net/cowch/lst_v3/commits/be1d4081e07b0982b355a270b7850a852a4398f5))

### 🛠️ Code Refactor

* **build:** added in more info to the relase section ([5854889](https://git.tuffraid.net/cowch/lst_v3/commits/5854889eb5398feebda50a5d256ce7aec39ce112))
* **build:** changes to auto release when we cahnge version ([643d12f](https://git.tuffraid.net/cowch/lst_v3/commits/643d12ff182827e724e1569a583bd625a0d1dd0c))
* **build:** changes to the way we do release so it builds as well ([7d55c5f](https://git.tuffraid.net/cowch/lst_v3/commits/7d55c5f43173edb48d8709adcb972b7d8fbc3ebd))
* **changelog:** reverted back to commit-chagnelog, like more than changeset for solo dev ([ed052df](https://git.tuffraid.net/cowch/lst_v3/commits/ed052dff3c81a7064660a7d25685e0505065252c))
* **notification:** reprint - removed a console log as it shouldnt bc there ([5f3d683](https://git.tuffraid.net/cowch/lst_v3/commits/5f3d683a13c831229674166cced699e373131316))
* **notification:** select menu looks propper now ([74262be](https://git.tuffraid.net/cowch/lst_v3/commits/74262beb6596ddc971971cc9214a2688accf3a8e))
* **opendock refactor on how releases are posted:** this was a bug maybe just a better refactory ([0880298](https://git.tuffraid.net/cowch/lst_v3/commits/0880298cf53d83e487c706e73854e0874ae2d9da))
* **queries:** changed dev version to be 1500ms vs 5000ms ([f3b8dd9](https://git.tuffraid.net/cowch/lst_v3/commits/f3b8dd94e5ebae0cc4dd0a2689a19051942e94b8))
* **release:** changes to only have the changelog in the release ([6e85991](https://git.tuffraid.net/cowch/lst_v3/commits/6e8599106298ed13febd069d6fda8b354efb5b7b))
* **userprofile:** changes to have the table be blank and say nothing subscribed ([3ecf5fb](https://git.tuffraid.net/cowch/lst_v3/commits/3ecf5fb916d5dc1b1ffb224e2142d94f7a9cb126))

### 📈 Project Builds

* **agent:** added westbend into the flow ([28c226d](https://git.tuffraid.net/cowch/lst_v3/commits/28c226ddbc37ab85cd6a9a6aec091def3e5623d6))
* **changelog:** reset the change log after all crap testing ([0059b9b](https://git.tuffraid.net/cowch/lst_v3/commits/0059b9b850c9647695a3fecaf5927c2e3ee7b192))
Dockerfile (10 changed lines)
@@ -9,10 +9,13 @@ WORKDIR /app
# Copy package files
COPY . .

# Install production dependencies only
# build backend
RUN npm ci
RUN npm run build:docker

RUN npm run build
# build frontend
RUN npm --prefix frontend ci
RUN npm --prefix frontend run build

###########
# Stage 2 #
@@ -33,6 +36,9 @@ RUN npm ci --omit=dev

COPY --from=build /app/dist ./dist
COPY --from=build /app/frontend/dist ./frontend/dist

# TODO add in drizzle migrates

ENV RUNNING_IN_DOCKER=true
EXPOSE 3000
README.md (10 changed lines)
@@ -7,7 +7,7 @@
Quick summary of current rewrite/migration goal.

- **Phase:** Backend rewrite
- **Last updated:** 2024-05-01
- **Last updated:** 2026-04-06

---
@@ -16,9 +16,9 @@ Quick summary of current rewrite/migration goal.
| Feature | Description | Status |
|----------|--------------|--------|
| User Authentication | ~~Login~~, ~~Signup~~, API Key | 🟨 In Progress |
| User Profile | Edit profile, upload avatar | ⏳ Not Started |
| User Profile | ~~Edit profile~~, upload avatar | 🟨 In Progress |
| User Admin | Edit user, create user, remove user, alplaprod user integration | ⏳ Not Started |
| Notifications | Subscribe, Create, Update, Remove, Manual Trigger | ⏳ Not Started |
| Notifications | ~~Subscribe~~, ~~Create~~, ~~Update~~, ~~Remove~~, Manual Trigger | 🟨 In Progress |
| Datamart | Create, Update, Run, Deactivate | 🔧 In Progress |
| Frontend | Analytics and charts | ⏳ Not Started |
| Docs | Instructions and trouble shooting | ⏳ Not Started |
@@ -44,7 +44,7 @@ _Status legend:_
How to run the current version of the app.

```bash
git clone https://github.com/youruser/yourrepo.git
cd yourrepo
git clone https://git.tuffraid.net/cowch/lst_v3.git
cd lst_v3
npm install
npm run dev
```
@@ -1,10 +1,12 @@
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
import { toNodeHandler } from "better-auth/node";
import express from "express";
import morgan from "morgan";
import { createLogger } from "./src/logger/logger.controller.js";
import { setupRoutes } from "./src/routeHandler.routes.js";
import { auth } from "./src/utils/auth.utils.js";
import { lstCors } from "./src/utils/cors.utils.js";
import { createLogger } from "./logger/logger.controller.js";
import { setupRoutes } from "./routeHandler.routes.js";
import { auth } from "./utils/auth.utils.js";
import { lstCors } from "./utils/cors.utils.js";

const createApp = async () => {
  const log = createLogger({ module: "system", subModule: "main start" });
@@ -20,15 +22,34 @@ const createApp = async () => {
    baseUrl = "/lst";
  }

  const __filename = fileURLToPath(import.meta.url);
  const __dirname = dirname(__filename);

  // well leave this active so we can monitor it to validate
  app.use(morgan("tiny"));
  app.use(morgan("dev"));
  app.set("trust proxy", true);
  app.all(`${baseUrl}api/auth/*splat`, toNodeHandler(auth));
  app.use(express.json());
  app.use(lstCors());
  app.all(`${baseUrl}/api/auth/*splat`, toNodeHandler(auth));
  app.use(express.json());
  setupRoutes(baseUrl, app);

  log.info("Express app created");
  app.use(
    `${baseUrl}/app`,
    express.static(join(__dirname, "../frontend/dist")),
  );

  app.get(`${baseUrl}/app/*splat`, (_, res) => {
    res.sendFile(join(__dirname, "../frontend/dist/index.html"));
  });

  app.all("*foo", (_, res) => {
    res.status(400).json({
      message:
        "You have encountered a route that dose not exist, please check the url and try again",
    });
  });

  log.info("Lst app created");
  return { app, baseUrl };
};
@@ -54,7 +54,8 @@ const signin = z.union([
const r = Router();

r.post("/", async (req, res) => {
  let login: unknown;
  let login: unknown | any;

  try {
    const validated = signin.parse(req.body);
    if ("email" in validated) {
@@ -92,6 +93,27 @@ r.post("/", async (req, res) => {
        password: validated.password,
      },
      headers: fromNodeHeaders(req.headers),
      asResponse: true,
    });

    if (login.status === 401) {
      return apiReturn(res, {
        success: false,
        level: "error", //connect.success ? "info" : "error",
        module: "routes",
        subModule: "auth",
        message: `Incorrect username or password please try again`,
        data: [],
        status: 401, //connect.success ? 200 : 400,
      });
    }

    login.headers.forEach((value: string, key: string) => {
      if (key.toLowerCase() === "set-cookie") {
        res.append("set-cookie", value);
      } else {
        res.setHeader(key, value);
      }
    });
  }
@@ -1,5 +1,5 @@
import { APIError } from "better-auth";
import { count, sql } from "drizzle-orm";
import { count, eq, sql } from "drizzle-orm";
import { Router } from "express";
import z from "zod";
import { db } from "../db/db.controller.js";
@@ -58,7 +58,10 @@ r.post("/", async (req, res) => {
  // if we have no users yet lets make this new one the admin
  if (userCount === 0) {
    // make this user an admin
    await db.update(user).set({ role: "admin", updatedAt: sql`NOW()` });
    await db
      .update(user)
      .set({ role: "admin", updatedAt: sql`NOW()` })
      .where(eq(user.id, newUser.user.id));
  }

  apiReturn(res, {
@@ -78,7 +81,7 @@ r.post("/", async (req, res) => {
  //   details: flattened,
  // });

  apiReturn(res, {
  return apiReturn(res, {
    success: false,
    level: "error", //connect.success ? "info" : "error",
    module: "routes",
@@ -90,7 +93,7 @@ r.post("/", async (req, res) => {
}

if (err instanceof APIError) {
  apiReturn(res, {
  return apiReturn(res, {
    success: false,
    level: "error", //connect.success ? "info" : "error",
    module: "routes",
@@ -101,7 +104,7 @@ r.post("/", async (req, res) => {
  });
}

apiReturn(res, {
return apiReturn(res, {
  success: false,
  level: "error", //connect.success ? "info" : "error",
  module: "routes",
backend/configs/gpSql.config.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
import type sql from "mssql";

const username = "gpviewer";
const password = "gp$$ViewOnly!";

export const gpSqlConfig: sql.config = {
  server: `USMCD1VMS011`,
  database: `ALPLA`,
  user: username,
  password: password,
  options: {
    encrypt: true,
    trustServerCertificate: true,
  },
  requestTimeout: 90000, // how long until we kill the query and fail it
  pool: {
    max: 20, // Maximum number of connections in the pool
    min: 0, // Minimum number of connections in the pool
    idleTimeoutMillis: 10000, // How long a connection is allowed to be idle before being released
    reapIntervalMillis: 1000, // how often to check for idle resources to destroy
    acquireTimeoutMillis: 100000, // How long until a complete timeout happens
  },
};
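A minimal usage sketch for the config above, using the mssql package's pool API (the `SELECT 1` probe query is an illustration, not from the repo):

```ts
import sql from "mssql";
import { gpSqlConfig } from "./configs/gpSql.config.js";

// Open a pool against the GP server, run a trivial connectivity probe, then close.
const pool = new sql.ConnectionPool(gpSqlConfig);
await pool.connect();
const result = await pool.request().query("SELECT 1 AS ok");
console.log(result.recordset); // [ { ok: 1 } ]
await pool.close();
```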
@@ -5,13 +5,14 @@ import type { Express } from "express";
//const __filename = fileURLToPath(import.meta.url);
// const __dirname = path.dirname(__filename);

import os from "node:os";
import { apiReference } from "@scalar/express-api-reference";
// const port = 3000;
import type { OpenAPIV3_1 } from "openapi-types";
import { datamartAddSpec } from "../scaler/datamartAdd.spec.js";
import { datamartUpdateSpec } from "../scaler/datamartUpdate.spec.js";
import { getDatamartSpec } from "../scaler/getDatamart.spec.js";
import { cronerActiveJobs } from "../scaler/cronerActiveJobs.spec.js";
import { cronerStatusChange } from "../scaler/cronerStatusChange.spec.js";
import { prodLoginSpec } from "../scaler/login.spec.js";
import { openDockApt } from "../scaler/opendockGetRelease.spec.js";
import { prodRestartSpec } from "../scaler/prodSqlRestart.spec.js";
import { prodStartSpec } from "../scaler/prodSqlStart.spec.js";
import { prodStopSpec } from "../scaler/prodSqlStop.spec.js";
@@ -28,10 +29,12 @@ export const openApiBase: OpenAPIV3_1.Document = {
  },
  servers: [
    {
      url: `http://localhost:3000${process.env.NODE_ENV?.trim() !== "production" ? "/lst" : "/"}`,
      // TODO: change this to the https:// if we are in production and port if not.
      url: `http://${os.hostname()}:3000${process.env.NODE_ENV?.trim() !== "production" ? "/lst" : "/"}`,
      description: "Development server",
    },
  ],

  components: {
    securitySchemes: {
      bearerAuth: {
@@ -50,6 +53,11 @@ export const openApiBase: OpenAPIV3_1.Document = {
        scheme: "basic",
        description: "Basic authentication using username and password",
      },
      cookieAuth: {
        type: "apiKey",
        in: "cookie",
        name: "better-auth.session_token",
      },
    },
    // schemas: {
    //   Error: {
@@ -61,7 +69,12 @@ export const openApiBase: OpenAPIV3_1.Document = {
    //   },
    // },.
  },

  // security: [
  //   {
  //     cookieAuth: [],
  //     basicAuth: [],
  //   },
  // ],
  tags: [
    {
      name: "Auth",
@@ -73,9 +86,12 @@ export const openApiBase: OpenAPIV3_1.Document = {
      description: "All system endpoints that will be available to run",
    },
    {
      name: "Datamart",
      description:
        "All Special queries to run based on there names.\n Refer to the docs to see all possible queries that can be ran here, you can also run the getQueries to see available.",
      name: "Utils",
      description: "All routes related to the utilities on the server",
    },
    {
      name: "Open Dock",
      description: "All routes related to the opendock on the server",
    },
    // { name: "TMS", description: "TMS integration" },
  ],
@@ -83,14 +99,21 @@
};

export const setupApiDocsRoutes = (baseUrl: string, app: Express) => {
  const mergedDatamart = {
    "/api/datamart": {
      ...(getDatamartSpec["/api/datamart"] ?? {}),
      ...(datamartAddSpec["/api/datamart"] ?? {}),
      ...(datamartUpdateSpec["/api/datamart"] ?? {}),
    },
    "/api/datamart/{name}": getDatamartSpec["/api/datamart/{name}"],
  };
  // const mergedDatamart = {
  //   "/api/datamart": {
  //     ...(cronerActiveJobs["/api/datamart"] ?? {}),
  //     ...(datamartAddSpec["/api/datamart"] ?? {}),
  //     ...(datamartUpdateSpec["/api/datamart"] ?? {}),
  //   },
  //   "/api/datamart/{name}": getDatamartSpec["/api/datamart/{name}"],
  // };

  // const mergeUtils = {
  //   "/api/utils/croner": {
  //     ...(cronerActiveJobs["/api/utils/croner"] ?? {}),
  //   },
  //   "/api/utils/{name}": cronerActiveJobs["/api/utils/{name}"],
  // };

  const fullSpec = {
    ...openApiBase,
@@ -101,7 +124,10 @@ export const setupApiDocsRoutes = (baseUrl: string, app: Express) => {
      ...prodRestartSpec,
      ...prodLoginSpec,
      ...prodRegisterSpec,
      ...mergedDatamart,
      //...mergedDatamart,
      ...cronerActiveJobs,
      ...cronerStatusChange,
      ...openDockApt,

      // Add more specs here as you build features
    },
@@ -115,7 +141,9 @@ export const setupApiDocsRoutes = (baseUrl: string, app: Express) => {
    apiReference({
      url: `${baseUrl}/api/docs.json`,
      theme: "purple",

      darkMode: true,
      persistAuth: true,
      authentication: {
        securitySchemes: {
          httpBasic: {
@@ -137,7 +165,7 @@ export const setupApiDocsRoutes = (baseUrl: string, app: Express) => {
      // Clojure
      clojure: ["clj_http"],
      // C#
      csharp: ["httpclient", "restsharp"],
      // csharp: ["httpclient", "restsharp"],
      // Dart
      dart: ["http"],
      // F#
@@ -13,48 +13,72 @@
 *
 * when a criteria is password over we will handle it by counting how many were passed up to 3 then deal with each one respectively
 */

import { eq } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { datamart } from "../db/schema/datamart.schema.js";
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
import {
  type SqlQuery,
  sqlQuerySelector,
} from "../prodSql/prodSqlQuerySelector.utils.js";
import { returnFunc } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
import { datamartData } from "./datamartData.utlis.js";

type Options = {
  name: string;
  value: string;
};
type Data = {
  name: string;
  options: string;
  options: Options;
  optionsRequired?: boolean;
  howManyOptionsRequired?: number;
};

export const runDatamartQuery = async (data: Data) => {
  // search the query db for the query by name
  const { data: queryInfo, error: qIe } = await tryCatch(
    db.select().from(datamart).where(eq(datamart.name, data.name)),
  );
  const sqlQuery = sqlQuerySelector(`${data.name}`) as SqlQuery;

  if (qIe) {
  const getDataMartInfo = datamartData.filter((x) => x.endpoint === data.name);

  // const optionsMissing =
  //   !data.options || Object.keys(data.options).length === 0;

  const optionCount =
    Object.keys(data.options).length ===
    getDataMartInfo[0]?.howManyOptionsRequired;

  if (getDataMartInfo[0]?.optionsRequired && !optionCount) {
    return returnFunc({
      success: false,
      level: "error",
      module: "datamart",
      subModule: "query",
      message: `This query is required to have the ${getDataMartInfo[0]?.howManyOptionsRequired} options set in order use it.`,
      data: [getDataMartInfo[0].options],
      notify: false,
    });
  }

  if (!sqlQuery.success) {
    return returnFunc({
      success: false,
      level: "error",
      module: "datamart",
      subModule: "query",
      message: `Error getting ${data.name} info`,
      data: [qIe],
      data: [sqlQuery.message],
      notify: false,
    });
  }

  // create the query with no changed just to have it here
  let datamartQuery = queryInfo[0]?.query || "";
  let datamartQuery = sqlQuery?.query || "";

  // split the criteria by "," then and then update the query
  if (data.options) {
    const params = new URLSearchParams(data.options);

    for (const [rawKey, rawValue] of params.entries()) {
      const key = rawKey.trim();
      const value = rawValue.trim();
      datamartQuery = datamartQuery.replaceAll(`[${key}]`, value);
    }
    Object.entries(data.options ?? {}).forEach(([key, value]) => {
      const pattern = new RegExp(`\\[${key.trim()}\\]`, "g");
      datamartQuery = datamartQuery.replace(pattern, String(value).trim());
    });
  }

  const { data: queryRun, error } = await tryCatch(
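The new substitution loop above replaces `[key]` placeholders in the stored query text with trimmed option values. An illustrative run, with a hypothetical query template (not one of the repo's real datamart queries):

```ts
let datamartQuery =
  "SELECT * FROM deliveries WHERE ship_date BETWEEN '[startDate]' AND '[endDate]'";
const options = { startDate: "1/1/2026", endDate: "1/31/2026" };

// Same pattern as the controller: escape the brackets, replace globally, trim values.
Object.entries(options).forEach(([key, value]) => {
  const pattern = new RegExp(`\\[${key.trim()}\\]`, "g");
  datamartQuery = datamartQuery.replace(pattern, String(value).trim());
});

console.log(datamartQuery);
// SELECT * FROM deliveries WHERE ship_date BETWEEN '1/1/2026' AND '1/31/2026'
```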
backend/datamart/datamart.routes.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
import type { Express } from "express";

import { apiReturn } from "../utils/returnHelper.utils.js";
import { datamartData } from "./datamartData.utlis.js";
import runQuery from "./getDatamart.route.js";

export const setupDatamartRoutes = (baseUrl: string, app: Express) => {
  // the sync callback.
  // app.get(`${baseUrl}/api/datamart/sync`, async (req, res) => {
  //   const { time } = req.query;
  //   const now = new Date();

  //   const minutes = parseInt(time as string, 10) || 15;
  //   const cutoff = new Date(now.getTime() - minutes * 60 * 1000);

  //   const results = await db
  //     .select()
  //     .from(datamart)
  //     .where(time ? gte(datamart.upd_date, cutoff) : sql`true`);

  //   return apiReturn(res, {
  //     success: true,
  //     level: "info",
  //     module: "datamart",
  //     subModule: "query",
  //     message: `All Queries older than ${parseInt(process.env.QUERY_CHECK?.trim() || "15", 10)}min `,
  //     data: results,
  //     status: 200,
  //   });
  // });

  //setup all the routes

  app.use(`${baseUrl}/api/datamart`, runQuery);

  // just sending a get on datamart will return all the queries that we can call.
  app.get(`${baseUrl}/api/datamart`, async (_, res) => {
    // const queries = await db
    //   .select({
    //     id: datamart.id,
    //     name: datamart.name,
    //     description: datamart.description,
    //     options: datamart.options,
    //     version: datamart.version,
    //     upd_date: datamart.upd_date,
    //   })
    //   .from(datamart)
    //   .where(and(eq(datamart.active, true), eq(datamart.public, true)));

    return apiReturn(res, {
      success: true,
      level: "info",
      module: "datamart",
      subModule: "query",
      message: "All active queries we can run",
      data: datamartData,
      status: 200,
    });
  });
};
backend/datamart/datamartData.utlis.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
/**
 * will store and maintain all queries for datamart here.
 * this way they can all be easily maintained and updated as we progress with the changes and updates to v3
 *
 * for options when putting them into the docs we will show examples on how to pull this
 */

export const datamartData = [
  {
    name: "Active articles",
    endpoint: "activeArticles",
    description: "returns all active articles for the server with custom data",
    options: "", // set as a string and each item will be seperated by a , this way we can split it later in the excel file.
    optionsRequired: false,
  },
  {
    name: "Delivery by date range",
    endpoint: "deliveryByDateRange",
    description: `Returns all Deliverys in selected date range IE: 1/1/${new Date(Date.now()).getFullYear()} to 1/31/${new Date(Date.now()).getFullYear()}`,
    options: "startDate,endDate", // set as a string and each item will be seperated by a , this way we can split it later in the excel file.
    optionsRequired: true,
    howManyOptionsRequired: 2,
  },
];
@@ -4,11 +4,14 @@ import { runDatamartQuery } from "./datamart.controller.js";

const r = Router();

type Options = {
  name: string;
  value: string;
};

r.get("/:name", async (req, res) => {
  const { name } = req.params;
  const options = new URLSearchParams(
    req.query as Record<string, string>,
  ).toString();
  const options = req.query as Options;

  const dataRan = await runDatamartQuery({ name, options });
  return apiReturn(res, {
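With the route now reading options straight from `req.query`, a call looks like the sketch below. The host, port, and `/lst` base path are assumptions taken from the Dockerfile and server setup elsewhere in this diff:

```ts
// deliveryByDateRange requires both options per datamartData.utlis.ts.
const res = await fetch(
  "http://localhost:3000/lst/api/datamart/deliveryByDateRange" +
    "?startDate=1/1/2026&endDate=1/31/2026",
);
const body = await res.json();
console.log(body.message, body.data); // apiReturn payload shape
```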
backend/db/dbCleanup.controller.ts (new file, 72 lines)
@@ -0,0 +1,72 @@
import { createLogger } from "../logger/logger.controller.js";
import { delay } from "../utils/delay.utils.js";
import { db } from "./db.controller.js";

type DBCount = {
  count: string;
};

const tableMap = {
  logs: "logs",
  jobs: "job_audit_log",
  opendockApt: "opendock_apt",
} as const;

type CleanupTable = keyof typeof tableMap;

/**
 * We will clean up the db based on age.
 * @param name database to run the cleanup on
 * @param daysToKeep optional default will be 90 days
 */
export const dbCleanup = async (name: CleanupTable, daysToKeep?: number) => {
  const log = createLogger({ module: "db", subModule: "cleanup" });
  // TODO: send backup of this to another server, via post or something maybe have to reduce the limit but well tackle that later.

  if (!daysToKeep) {
    daysToKeep = 90;
  }
  const limit = 1000;
  const delayTime = 250;
  let rowsDeleted: number;

  const dbCount = (await db.execute(
    `select count(*) from public.${tableMap[name]} WHERE created_at < NOW() - INTERVAL '${daysToKeep} days'`,
  )) as DBCount[];

  const loopCount = Math.ceil(
    parseInt(dbCount[0]?.count ?? `${limit}`, 10) / limit,
  );

  if (parseInt(dbCount[0]?.count ?? `${limit}`, 10) > 1) {
    log.info(
      `Table clean up for: ${name}, that are older than ${daysToKeep} day, will be removed, There is ${loopCount} loops to be completed, Approx time: ${((loopCount * delayTime) / 1000 / 60).toFixed(2)} min(s).`,
    );
  } else {
    log.info(`Table clean up for: ${name}, Currently has nothing to clean up.`);
    return;
  }

  do {
    // cleanup logs
    const deleted = await db.execute(`
      DELETE FROM public.${tableMap[name]}
      WHERE id IN (
        SELECT id
        FROM public.${tableMap[name]}
        WHERE created_at < NOW() - INTERVAL '${daysToKeep} days'
        ORDER BY created_at
        LIMIT ${limit}
      )
      RETURNING id;
    `);

    rowsDeleted = deleted.length;

    if (rowsDeleted > 0) {
      await delay(delayTime);
    }
  } while (rowsDeleted === limit);

  log.info(`Table clean up for: ${name}, Has completed.`);
};
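A usage sketch for dbCleanup; the table keys come from tableMap above, and the batch size (1,000 rows) and inter-batch delay (250 ms) are fixed inside the function:

```ts
import { dbCleanup } from "./db/dbCleanup.controller.js";

// Purge job audit rows older than 30 days, in 1,000-row batches.
await dbCleanup("jobs", 30);

// daysToKeep is optional; the default retention window is 90 days.
await dbCleanup("logs");
```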
backend/db/schema/alplapurchase.schema.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
import {
  integer,
  jsonb,
  pgTable,
  text,
  timestamp,
  uuid,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import type { z } from "zod";

export const alplaPurchaseHistory = pgTable("alpla_purchase_history", {
  id: uuid("id").defaultRandom().primaryKey(),
  apo: integer("apo"),
  revision: integer("revision"),
  confirmed: integer("confirmed"),
  status: integer("status"),
  statusText: text("status_text"),
  journalNum: integer("journal_num"),
  add_date: timestamp("add_date").defaultNow(),
  add_user: text("add_user"),
  upd_user: text("upd_user"),
  upd_date: timestamp("upd_date").defaultNow(),
  remark: text("remark"),
  approvedStatus: text("approved_status").default("new"),
  position: jsonb("position").default([]),
  createdAt: timestamp("created_at").defaultNow(),
  updatedAt: timestamp("updated_at").defaultNow(),
});

export const alplaPurchaseHistorySchema =
  createSelectSchema(alplaPurchaseHistory);
export const newAlplaPurchaseHistorySchema =
  createInsertSchema(alplaPurchaseHistory);

export type AlplaPurchaseHistory = z.infer<typeof alplaPurchaseHistorySchema>;
export type NewAlplaPurchaseHistory = z.infer<
  typeof newAlplaPurchaseHistorySchema
>;
backend/db/schema/auditLog.schema.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import {
  index,
  integer,
  jsonb,
  pgTable,
  text,
  timestamp,
  uuid,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import type { z } from "zod";

export const jobAuditLog = pgTable(
  "job_audit_log",
  {
    id: uuid("id").defaultRandom().primaryKey(),
    jobName: text("job_name"),
    startedAt: timestamp("start_at"),
    finishedAt: timestamp("finished_at"),
    durationMs: integer("duration_ms"),
    status: text("status"), //success | error
    errorMessage: text("error_message"),
    errorStack: text("error_stack"),
    metadata: jsonb("meta_data"),
    createdAt: timestamp("created_at").defaultNow(),
  },
  (table) => {
    return {
      cleanupIdx: index("idx_job_audit_logs_cleanup").on(
        table.startedAt,
        table.id,
      ),
    };
  },
);

export const jobAuditLogSchema = createSelectSchema(jobAuditLog);
export const newJobAuditLogSchema = createInsertSchema(jobAuditLog);

export type JobAuditLog = z.infer<typeof jobAuditLogSchema>;
export type NewJobAuditLog = z.infer<typeof newJobAuditLogSchema>;
@@ -17,6 +17,7 @@ export const datamart = pgTable("datamart", {
  version: integer("version").default(1).notNull(),
  active: boolean("active").default(true),
  options: text("options").default(""),
  public: boolean("public_access").default(false),
  add_date: timestamp("add_date").defaultNow(),
  add_user: text("add_user").default("lst-system"),
  upd_date: timestamp("upd_date").defaultNow(),
backend/db/schema/notifications.schema.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import {
  boolean,
  jsonb,
  pgTable,
  text,
  uniqueIndex,
  uuid,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import type z from "zod";

export const notifications = pgTable(
  "notifications",
  {
    id: uuid("id").defaultRandom().primaryKey(),
    name: text("name").notNull(),
    description: text("description").notNull(),
    active: boolean("active").default(false),
    interval: text("interval").default("5"),
    options: jsonb("options").default([]),
  },
  (table) => [uniqueIndex("notify_name").on(table.name)],
);

export const notificationSchema = createSelectSchema(notifications);
export const newNotificationSchema = createInsertSchema(notifications);

export type Notification = z.infer<typeof notificationSchema>;
export type NewNotification = z.infer<typeof newNotificationSchema>;
backend/db/schema/notifications.sub.schema.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
import { pgTable, text, unique, uuid } from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import type z from "zod";
import { user } from "./auth.schema.js";
import { notifications } from "./notifications.schema.js";

export const notificationSub = pgTable(
  "notification_sub",
  {
    id: uuid("id").defaultRandom().primaryKey(),
    userId: text("user_id")
      .notNull()
      .references(() => user.id, { onDelete: "cascade" }),
    notificationId: uuid("notification_id")
      .notNull()
      .references(() => notifications.id, { onDelete: "cascade" }),
    emails: text("emails").array().default([]),
  },
  (table) => ({
    userNotificationUnique: unique(
      "notification_sub_user_notification_unique",
    ).on(table.userId, table.notificationId),
  }),
);

export const notificationSubSchema = createSelectSchema(notificationSub);
export const newNotificationSubSchema = createInsertSchema(notificationSub);

export type NotificationSub = z.infer<typeof notificationSubSchema>;
export type NewNotificationSub = z.infer<typeof newNotificationSubSchema>;
backend/db/schema/opendock.schema.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
import {
  index,
  integer,
  jsonb,
  pgTable,
  text,
  timestamp,
  uuid,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import type { z } from "zod";

export const opendockApt = pgTable(
  "opendock_apt",
  {
    id: uuid("id").defaultRandom().primaryKey(),
    release: integer("release").notNull().unique(),
    openDockAptId: text("open_dock_apt_id").notNull(),
    appointment: jsonb("appointment").notNull().default([]),
    upd_date: timestamp("upd_date").notNull().defaultNow(),
    createdAt: timestamp("created_at").notNull().defaultNow(),
  },
  (table) => ({
    releaseIdx: index("opendock_apt_release_idx").on(table.release),
    openDockAptIdIdx: index("opendock_apt_opendock_id_idx").on(
      table.openDockAptId,
    ),
  }),
);

export const opendockAptSchema = createSelectSchema(opendockApt);
export const newOpendockAptSchema = createInsertSchema(opendockApt);

export type OpendockApt = z.infer<typeof opendockAptSchema>;
export type NewOpendockApt = z.infer<typeof newOpendockAptSchema>;
backend/db/schema/printerLogs.schema.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
import { integer, pgTable, text } from "drizzle-orm/pg-core";

export const opendockApt = pgTable("printer_log", {
  id: integer().primaryKey().generatedAlwaysAsIdentity(),
  name: text("name").notNull(),
});
backend/db/schema/settings.schema.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import {
  boolean,
  integer,
  jsonb,
  pgEnum,
  pgTable,
  text,
  timestamp,
  uniqueIndex,
  uuid,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";

import { z } from "zod";

export const settingType = pgEnum("setting_type", [
  "feature", // when changed deals with triggering the croner related to this
  "system", // when changed fires a system restart but this should be rare and all these settings should be in the env
  "standard", // will be effected by the next process, either croner or manual trigger
]);

export const settings = pgTable(
  "settings",
  {
    id: uuid("settings_id").defaultRandom().primaryKey(),
    name: text("name").notNull(),
    value: text("value").notNull(), // this is used in junction with active, only needed if the setting isn't a bool
    description: text("description"),
    moduleName: text("moduleName"), // what part of lst dose it belong to this is used to split the settings out later
    active: boolean("active").default(true),
    roles: jsonb("roles").$type<string[]>().notNull().default(["systemAdmin"]), // role or roles to see this goes along with the moduleName, need to have a x role in module to see this setting.
    settingType: settingType(),
    seedVersion: integer("seed_version").default(1), // this is intended for if we want to update the settings.
    add_User: text("add_User").default("LST_System").notNull(),
    add_Date: timestamp("add_Date").defaultNow(),
    upd_user: text("upd_User").default("LST_System").notNull(),
    upd_date: timestamp("upd_date").defaultNow(),
  },
  (table) => [
    // uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
    uniqueIndex("name").on(table.name),
  ],
);

export const settingSchema = createSelectSchema(settings);
export const newSettingSchema = createInsertSchema(settings, {
  name: z.string().min(3, {
    message: "The name of the setting must be longer than 3 letters",
  }),
});

export type Setting = z.infer<typeof settingSchema>;
export type NewSetting = z.infer<typeof newSettingSchema>;
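The insert schema above layers a zod refinement over the drizzle-zod base; a small sketch of what a validation failure looks like (the sample input is made up):

```ts
const parsed = newSettingSchema.safeParse({ name: "ab", value: "1" });

if (!parsed.success) {
  // name has min(3), so a two-character name is rejected with the custom message.
  console.log(parsed.error.issues[0]?.message);
  // -> "The name of the setting must be longer than 3 letters"
}
```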
backend/db/schema/stats.schema.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import type { InferSelectModel } from "drizzle-orm";
import { integer, pgTable, text, timestamp } from "drizzle-orm/pg-core";

export const serverStats = pgTable("stats", {
  id: text("id").primaryKey().default("serverStats"),
  build: integer("build").notNull().default(1),
  lastUpdate: timestamp("last_update").defaultNow(),
});

export type ServerStats = InferSelectModel<typeof serverStats>;
backend/gpSql/gpSql.routes.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { type Express, Router } from "express";
import { requireAuth } from "../middleware/auth.middleware.js";
import restart from "./gpSqlRestart.route.js";
import start from "./gpSqlStart.route.js";
import stop from "./gpSqlStop.route.js";
export const setupGPSqlRoutes = (baseUrl: string, app: Express) => {
  //setup all the routes
  // Apply auth to entire router
  const router = Router();
  router.use(requireAuth);

  router.use(start);
  router.use(stop);
  router.use(restart);

  app.use(`${baseUrl}/api/system/gpSql`, router);
};
155
backend/gpSql/gpSqlConnection.controller.ts
Normal file
@@ -0,0 +1,155 @@
import sql from "mssql";
import { gpSqlConfig } from "../configs/gpSql.config.js";
import { createLogger } from "../logger/logger.controller.js";
import { checkHostnamePort } from "../utils/checkHost.utils.js";
import { returnFunc } from "../utils/returnHelper.utils.js";

export let pool2: sql.ConnectionPool;
export let connected: boolean = false;
export let reconnecting = false;

export const connectGPSql = async () => {
  const serverUp = await checkHostnamePort(`USMCD1VMS011:1433`); // note: host is hardcoded here, while reconnectToSql reads process.env.PROD_SERVER
  if (!serverUp) {
    // we will try to reconnect
    connected = false;
    return returnFunc({
      success: false,
      level: "error",
      module: "system",
      subModule: "db",
      message: "GP server is offline or unreachable.",
    });
  }

  // if restart is clicked from the api while we are already connected, kick back and say no
  if (connected) {
    return returnFunc({
      success: false,
      level: "error",
      module: "system",
      subModule: "db",
      message: "The Sql server is already connected.",
    });
  }

  // try to connect to the sql server
  try {
    pool2 = new sql.ConnectionPool(gpSqlConfig);
    await pool2.connect();
    connected = true;
    return returnFunc({
      success: true,
      level: "info",
      module: "system",
      subModule: "db",
      message: `${gpSqlConfig.server} is connected to ${gpSqlConfig.database}`,
      data: [],
      notify: false,
    });
  } catch (error) {
    return returnFunc({
      success: false,
      level: "error",
      module: "system",
      subModule: "db",
      message: "Failed to connect to the prod sql server.",
      data: [error],
      notify: false,
    });
  }
};

export const closePool = async () => {
  if (!connected) {
    return returnFunc({
      success: false,
      level: "error",
      module: "system",
      subModule: "db",
      message: "There is no connection to the prod server currently.",
    });
  }

  try {
    await pool2.close();
    connected = false;
    return returnFunc({
      success: true,
      level: "info",
      module: "system",
      subModule: "db",
      message: "The sql connection has been closed.",
    });
  } catch (error) {
    connected = false;

    return returnFunc({
      success: false,
      level: "error",
      module: "system",
      subModule: "db",
      message: "There was an error closing the sql connection.",
      data: [error],
    });
  }
};
export const reconnectToSql = async () => {
  const log = createLogger({
    module: "system",
    subModule: "db",
  });
  if (reconnecting) return;

  // set reconnecting to true while we try to reconnect
  reconnecting = true;

  // start the delay out at 2 seconds
  let delayStart = 2000;
  let attempt = 0;
  const maxAttempts = 10;

  while (!connected && attempt < maxAttempts) {
    attempt++;
    log.info(
      `Reconnect attempt ${attempt}/${maxAttempts} in ${delayStart / 1000}s ...`,
    );

    await new Promise((res) => setTimeout(res, delayStart));

    const serverUp = await checkHostnamePort(`${process.env.PROD_SERVER}:1433`);

    if (!serverUp) {
      delayStart = Math.min(delayStart * 2, 30000); // exponential backoff, capped at 30s
      continue; // keep retrying; returning here would leave `reconnecting` stuck at true
    }

    try {
      pool2 = await sql.connect(gpSqlConfig);
      reconnecting = false;
      connected = true;
      log.info(`${gpSqlConfig.server} is connected to ${gpSqlConfig.database}`);
    } catch (error) {
      delayStart = Math.min(delayStart * 2, 30000);
      // log the failure and keep looping; returning mid-retry would also strand `reconnecting`
      returnFunc({
        success: false,
        level: "error",
        module: "system",
        subModule: "db",
        message: "Failed to reconnect to the prod sql server.",
        data: [error],
        notify: false,
      });
    }
  }

  if (!connected) {
    log.error(
      { notify: true },
      "Max reconnect attempts reached on the prodSql server. Stopping retries.",
    );

    reconnecting = false;
    // TODO: alert someone here
  }
};
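For reference, `reconnectToSql` doubles the wait after each failed attempt and caps it at 30 s. A small sketch of the worst-case schedule, assuming every attempt fails and using the same constants as the code above:

    // Illustration only: reproduces the doubling-with-cap rule from reconnectToSql.
    let delay = 2000;
    const delays: number[] = [];
    for (let attempt = 1; attempt <= 10; attempt++) {
      delays.push(delay);
      delay = Math.min(delay * 2, 30000);
    }
    console.log(delays); // [2000, 4000, 8000, 16000, 30000, 30000, ...] -> ~210s total wait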
97
backend/gpSql/gpSqlQuery.controller.ts
Normal file
@@ -0,0 +1,97 @@
import { returnFunc } from "../utils/returnHelper.utils.js";
import {
  connected,
  pool2,
  reconnecting,
  reconnectToSql,
} from "./gpSqlConnection.controller.js";

interface SqlError extends Error {
  code?: string;
  originalError?: {
    info?: { message?: string };
  };
}

/**
 * Run a prod query.
 * Pass in the query as a string along with the name of the query.
 * The query should look like:
 * * select * from AlplaPROD_test1.dbo.table
 * Always use "test1"; it is swapped for the real plant token before the query runs.
 */
export const gpQuery = async (queryToRun: string, name: string) => {
  if (!connected) {
    reconnectToSql();

    if (reconnecting) {
      return returnFunc({
        success: false,
        level: "error",
        module: "system",
        subModule: "gpSql",
        message: `The sql ${process.env.PROD_PLANT_TOKEN} is already trying to reconnect`,
        data: [],
        notify: false,
      });
    } else {
      return returnFunc({
        success: false,
        level: "error",
        module: "system",
        subModule: "gpSql",
        message: `${process.env.PROD_PLANT_TOKEN} is not connected, and failed to connect.`,
        data: [],
        notify: true,
      });
    }
  }

  // change to the correct server
  const query = queryToRun.replaceAll(
    "test1",
    `${process.env.PROD_PLANT_TOKEN}`,
  );

  try {
    const result = await pool2.request().query(query);
    return {
      success: true,
      message: `Query results for: ${name}`,
      data: result.recordset ?? [],
    };
  } catch (error: unknown) {
    const err = error as SqlError;
    if (err.code === "ETIMEOUT") {
      return returnFunc({
        success: false,
        module: "system",
        subModule: "gpSql",
        level: "error",
        message: `${name} did not run due to a timeout.`,
        notify: false,
        data: [],
      });
    }

    if (err.code === "EREQUEST") {
      return returnFunc({
        success: false,
        module: "system",
        subModule: "gpSql",
        level: "error",
        message: `${name} encountered an error: ${err.originalError?.info?.message || "undefined error"}`,
        data: [],
      });
    }

    return returnFunc({
      success: false,
      module: "system",
      subModule: "gpSql",
      level: "error",
      message: `${name} encountered an unknown error.`,
      data: [],
    });
  }
};
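A hypothetical caller for `gpQuery`, following the JSDoc convention above (the table name and query text are made up; only the `test1` placeholder rule comes from the code):

    // "test1" is swapped for PROD_PLANT_TOKEN inside gpQuery before the query runs.
    const result = await gpQuery(
      "select top 10 * from AlplaPROD_test1.dbo.someTable", // someTable is a placeholder name
      "exampleTopTen",
    );
    if (result.success) console.log(result.data);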
29
backend/gpSql/gpSqlQuerySelector.utils.ts
Normal file
@@ -0,0 +1,29 @@
import { readFileSync } from "node:fs";

export type SqlGPQuery = {
  query: string;
  success: boolean;
  message: string;
};

export const sqlGpQuerySelector = (name: string) => {
  try {
    const queryFile = readFileSync(
      new URL(`../gpSql/queries/${name}.sql`, import.meta.url),
      "utf8",
    );

    return {
      success: true,
      message: `Query for: ${name}`,
      query: queryFile,
    };
  } catch (e) {
    console.error(e);
    return {
      success: false,
      message:
        "Error getting the query file, please make sure you have the correct name.",
    };
  }
};
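A brief usage sketch of the selector (the cast mirrors the `as SqlQuery` pattern used elsewhere in this diff; `reqCheck` is the query file added below):

    const selected = sqlGpQuerySelector("reqCheck") as SqlGPQuery;
    if (selected.success) {
      console.log(selected.message); // "Query for: reqCheck"
    }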
23
backend/gpSql/gpSqlRestart.route.ts
Normal file
@@ -0,0 +1,23 @@
import { Router } from "express";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { closePool, connectGPSql } from "./gpSqlConnection.controller.js";

const r = Router();

r.post("/restart", async (_, res) => {
  await closePool();

  await new Promise((r) => setTimeout(r, 2000));

  const connect = await connectGPSql();
  apiReturn(res, {
    success: connect.success,
    level: connect.success ? "info" : "error",
    module: "routes",
    subModule: "prodSql",
    message: "Sql Server has been restarted",
    data: connect.data,
    status: connect.success ? 200 : 400,
  });
});
export default r;
20
backend/gpSql/gpSqlStart.route.ts
Normal file
@@ -0,0 +1,20 @@
import { Router } from "express";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { connectGPSql } from "./gpSqlConnection.controller.js";

const r = Router();

r.post("/start", async (_, res) => {
  const connect = await connectGPSql();
  apiReturn(res, {
    success: connect.success,
    level: connect.success ? "info" : "error",
    module: "routes",
    subModule: "prodSql",
    message: connect.message,
    data: connect.data,
    status: connect.success ? 200 : 400,
  });
});

export default r;
20
backend/gpSql/gpSqlStop.route.ts
Normal file
@@ -0,0 +1,20 @@
import { Router } from "express";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { closePool } from "./gpSqlConnection.controller.js";

const r = Router();

r.post("/stop", async (_, res) => {
  const connect = await closePool();
  apiReturn(res, {
    success: connect.success,
    level: connect.success ? "info" : "error",
    module: "routes",
    subModule: "prodSql",
    message: connect.message,
    data: connect.data,
    status: connect.success ? 200 : 400,
  });
});

export default r;
39
backend/gpSql/queries/reqCheck.sql
Normal file
@@ -0,0 +1,39 @@
USE [ALPLA]

SELECT DISTINCT r.[POPRequisitionNumber] as req,
    r.[ApprovalStatus] as approvalStatus,
    r.[Requested By] as requestedBy,
    format(t.[Created Date], 'yyyy-MM-dd') as createdAt,
    format(r.[Requisition Date], 'MM/dd/yyyy') as expectedDate,
    r.[Requisition Amount] as glAccount,
    case when r.[Account Segment 2] is null or r.[Account Segment 2] = '' then '999' else cast(r.[Account Segment 2] as varchar) end as plant,
    t.Status as status,
    t.[Document Status] as docStatus,
    t.[Workflow Status] as reqState,
    CASE
        WHEN [Workflow Status] = 'Completed'
            THEN 'Pending APO conversion'
        WHEN [Workflow Status] = 'Pending User Action'
            AND r.[ApprovalStatus] = 'Pending Approval'
            THEN 'Pending plant approver'
        WHEN [Workflow Status] = ''
            AND r.[ApprovalStatus] = 'Not Submitted'
            THEN 'Req not submitted'
        ELSE 'Unknown reason'
    END AS approvedStatus

FROM [dbo].[PORequisitions] r (nolock)

left join [dbo].[PurchaseRequisitions] as t (nolock)
    on t.[Requisition Number] = r.[POPRequisitionNumber]

--where ApprovalStatus = 'Pending Approval'
--and [Account Segment 2] = 80

where r.POPRequisitionNumber in ([reqsToCheck])

Order By r.POPRequisitionNumber
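The `[reqsToCheck]` token above is a placeholder; presumably the caller substitutes a comma-separated list of requisition numbers before running the text through `gpQuery`, mirroring the `[intervalCheck]`/`[ignoreList]` replacements the notification modules perform. A hypothetical sketch (the requisition numbers are invented):

    const selected = sqlGpQuerySelector("reqCheck") as SqlGPQuery;
    const reqs = [123456, 123789]; // example requisition numbers
    const query = selected.query.replace("[reqsToCheck]", reqs.join(","));
    const result = await gpQuery(query, "reqCheck");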
@@ -1,5 +1,4 @@
import build from "pino-abstract-transport";

import { db } from "../db/db.controller.js";
import { logs } from "../db/schema/logs.schema.js";
import { tryCatch } from "../utils/trycatch.utils.js";
90
backend/logger/logger.controller.ts
Normal file
@@ -0,0 +1,90 @@
import { Writable } from "node:stream";

import pino, { type Logger } from "pino";
import { db } from "../db/db.controller.js";
import { logs } from "../db/schema/logs.schema.js";
import { emitToRoom } from "../socket.io/roomEmitter.socket.js";
import { tryCatch } from "../utils/trycatch.utils.js";
//import build from "pino-abstract-transport";

export const logLevel = process.env.LOG_LEVEL || "info";

const pinoLogLevels: Record<number, string> = {
  10: "trace",
  20: "debug",
  30: "info",
  40: "warn",
  50: "error",
  60: "fatal",
};

// custom writable stream that persists log entries to the database
const dbStream = new Writable({
  objectMode: true,
  async write(chunk, _enc, callback) {
    try {
      const obj = JSON.parse(chunk.toString());

      const levelName = pinoLogLevels[obj.level] || "unknown";

      const res = await tryCatch(
        db
          .insert(logs)
          .values({
            level: levelName,
            module: obj?.module?.toLowerCase(),
            subModule: obj?.subModule?.toLowerCase(),
            hostname: obj?.hostname?.toLowerCase(),
            message: obj.msg,
            stack: obj?.stack,
          })
          .returning(),
      );

      if (res.error) {
        console.error(res.error);
      }

      if (obj.room) {
        emitToRoom(obj.room, res.data ? res.data[0] : obj);
      }
      emitToRoom("logs", res.data ? res.data[0] : obj);
      callback();
    } catch (err) {
      console.error("DB log insert error:", err);
      callback();
    }
  },
});

const rootLogger: Logger = pino(
  {
    level: logLevel,
    redact: { paths: ["email", "password"], remove: true },
  },
  pino.multistream([
    {
      level: logLevel,
      stream: pino.transport({
        target: "pino-pretty",
        options: {
          colorize: true,
          singleLine: true,
        },
      }),
    },
    {
      level: logLevel,
      stream: dbStream,
    },
  ]),
);
/**
 * Create a child logger with the given bindings.
 * Example bindings: { module: "system", subModule: "db", room: "logs" }
 */
export const createLogger = (bindings: Record<string, unknown>): Logger => {
  return rootLogger.child(bindings);
};
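A short usage sketch of the child-logger pattern above (the binding values are illustrative; `room` is the field `dbStream` checks before emitting to a socket room):

    const log = createLogger({ module: "system", subModule: "db", room: "logs" });
    log.info("connected"); // persisted to the logs table and emitted to the "logs" room
    log.error({ notify: true }, "connection lost"); // extra fields ride along on the record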
58
backend/middleware/auth.middleware.ts
Normal file
@@ -0,0 +1,58 @@
import { fromNodeHeaders } from "better-auth/node";
import type { NextFunction, Request, Response } from "express";
import { auth } from "../utils/auth.utils.js";

declare global {
  namespace Express {
    interface Request {
      user?: {
        id: string;
        email?: string;
        roles?: string | null | undefined; //Record<string, string[]>;
        username?: string | null | undefined;
      };
    }
  }
}

// function toWebHeaders(nodeHeaders: Request["headers"]): Headers {
//   const h = new Headers();
//   for (const [key, value] of Object.entries(nodeHeaders)) {
//     if (Array.isArray(value)) {
//       value.forEach((v) => h.append(key, v));
//     } else if (value !== undefined) {
//       h.set(key, value);
//     }
//   }
//   return h;
// }

export const requireAuth = async (
  req: Request,
  res: Response,
  next: NextFunction,
) => {
  try {
    const session = await auth.api.getSession({
      headers: fromNodeHeaders(req.headers),
      //query: { disableCookieCache: true },
    });

    if (!session) {
      return res.status(401).json({ error: "Unauthorized" });
    }

    //console.log(session);

    req.user = {
      id: session.user.id,
      email: session.user.email,
      roles: session.user.role,
      username: session.user.username,
    };

    next();
  } catch {
    return res.status(401).json({ error: "Unauthorized" });
  }
};
52
backend/middleware/auth.requiredPerms.middleware.ts
Normal file
@@ -0,0 +1,52 @@
import type { NextFunction, Request, Response } from "express";
import { auth } from "../utils/auth.utils.js";

type PermissionMap = Record<string, string[]>;

declare global {
  namespace Express {
    interface Request {
      authz?: {
        success: boolean;
        permissions: PermissionMap;
      };
    }
  }
}

function normalizeRoles(roles: unknown): string {
  if (Array.isArray(roles)) return roles.join(",");
  if (typeof roles === "string") return roles;
  return "";
}

export function requirePermission(permissions: PermissionMap) {
  return async (req: Request, res: Response, next: NextFunction) => {
    try {
      const role = normalizeRoles(req.user?.roles) as any;

      const result = await auth.api.userHasPermission({
        body: {
          role,
          permissions,
        },
      });

      req.authz = {
        success: !!result?.success,
        permissions,
      };

      if (!result?.success) {
        return res.status(403).json({
          ok: false,
          message: "You do not have permission to perform this action.",
        });
      }

      next();
    } catch (error) {
      next(error);
    }
  };
}
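A hedged usage sketch of the middleware factory above (the route and handler are hypothetical; the permission map shape matches notification.update.route.ts later in this diff):

    import { Router } from "express";

    const r = Router();
    r.patch(
      "/:id",
      requirePermission({ notifications: ["update"] }),
      async (req, res) => {
        // only reached when the caller's role grants notifications.update
        res.json({ ok: true, checked: req.authz?.permissions });
      },
    );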
37
backend/middleware/featureActive.middleware.ts
Normal file
@@ -0,0 +1,37 @@
import { and, eq } from "drizzle-orm";
import type { NextFunction, Request, Response } from "express";
import { db } from "../db/db.controller.js";
import { settings } from "../db/schema/settings.schema.js";
import { tryCatch } from "../utils/trycatch.utils.js";

/**
 * @param moduleName name of the module we are checking is enabled or not.
 */
export const featureCheck = (moduleName: string) => {
  // get the feature flag from the settings

  return async (_req: Request, res: Response, next: NextFunction) => {
    const { data: sData, error: sError } = await tryCatch(
      db
        .select()
        .from(settings)
        .where(
          and(
            eq(settings.settingType, "feature"),
            eq(settings.name, moduleName),
          ),
        ),
    );

    if (sError) {
      return res.status(500).json({ error: "Internal Error" });
    }

    if (!sData?.length || !sData[0]?.active) {
      return res.status(403).json({ error: "Feature disabled" });
    }

    next();
  };
};
113
backend/notification/notification.alplapurchase.ts
Normal file
@@ -0,0 +1,113 @@
import { eq } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { notifications } from "../db/schema/notifications.schema.js";
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
import {
  type SqlQuery,
  sqlQuerySelector,
} from "../prodSql/prodSqlQuerySelector.utils.js";
import { returnFunc } from "../utils/returnHelper.utils.js";
import { sendEmail } from "../utils/sendEmail.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";

/**
 * Check the alpla purchase data for changes and email subscribers.
 */
const func = async (data: any, emails: string) => {
  // re-read the notification row, since its options can be updated between intervals if no one touches it
  const { data: l, error: le } = (await tryCatch(
    db.select().from(notifications).where(eq(notifications.id, data.id)),
  )) as any;

  if (le) {
    return returnFunc({
      success: false,
      level: "error",
      module: "notification",
      subModule: "query",
      message: `${data.name} encountered an error while trying to get initial info`,
      data: [le],
      notify: true,
    });
  }

  // search the query db for the query by name
  const sqlQuery = sqlQuerySelector(`${data.name}`) as SqlQuery;
  // build the list of audit log ids to ignore
  const ignoreIds = l[0].options[0]?.auditId
    ? `${l[0].options[0]?.auditId}`
    : "0";

  // run the check
  const { data: queryRun, error } = await tryCatch(
    prodQuery(
      sqlQuery.query
        .replace("[intervalCheck]", l[0].interval)
        .replace("[ignoreList]", ignoreIds),
      `Running notification query: ${l[0].name}`,
    ),
  );

  if (error) {
    return returnFunc({
      success: false,
      level: "error",
      module: "notification",
      subModule: "query",
      message: `There was an error getting the data for: ${l[0].name}`,
      data: [error],
      notify: true,
    });
  }

  if (queryRun.data.length > 0) {
    // update the latest audit id
    const { error: dbe } = await tryCatch(
      db
        .update(notifications)
        .set({ options: [{ auditId: `${queryRun.data[0].id}` }] })
        .where(eq(notifications.id, data.id)),
    );

    if (dbe) {
      return returnFunc({
        success: false,
        level: "error",
        module: "notification",
        subModule: "query",
        message: `There was an error updating the audit id for: ${l[0].name}`,
        data: [dbe],
        notify: true,
      });
    }

    // send the email

    const sentEmail = await sendEmail({
      email: emails,
      subject: "Alert! Label Reprinted", // note: subject/template currently mirror the reprintLabels notification
      template: "reprintLabels",
      context: {
        items: queryRun.data,
      },
    });

    if (!sentEmail?.success) {
      return returnFunc({
        success: false,
        level: "error",
        module: "email",
        subModule: "notification",
        message: `${l[0].name} failed to send the email`,
        data: [sentEmail],
        notify: true,
      });
    }
  } else {
    console.log("doing nothing as there is nothing to do.");
  }
  // TODO: send the error to systemAdmin users so they do not always need to be on the notifications.
  // these errors are defined per notification.
};

export default func;
153
backend/notification/notification.controller.ts
Normal file
@@ -0,0 +1,153 @@
import { eq } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { notifications } from "../db/schema/notifications.schema.js";
import { notificationSub } from "../db/schema/notifications.sub.schema.js";
import { createLogger } from "../logger/logger.controller.js";
import { minutesToCron } from "../utils/croner.minConvert.js";
import { createCronJob, stopCronJob } from "../utils/croner.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";

const log = createLogger({ module: "notifications", subModule: "start" });

export const startNotifications = async () => {
  // get the active notifications

  const { data, error } = await tryCatch(
    db.select().from(notifications).where(eq(notifications.active, true)),
  );

  if (error) {
    log.error(
      { error: error },
      "There was an error when getting notifications.",
    );
    return;
  }

  if (data) {
    if (data.length === 0) {
      log.info(
        {},
        "There are no currently active notifications to start up.",
      );
      return;
    }

    // get the subs and see if we have any currently, so we can fire up the notification
    const { data: sub, error: subError } = await tryCatch(
      db.select().from(notificationSub),
    );

    if (subError) {
      log.error(
        { error: subError },
        "There was an error when getting subscriptions.",
      );
      return;
    }

    if (sub.length === 0) {
      log.info({}, "There are no currently active subscriptions.");
      return;
    }

    const emailString = [
      ...new Set(
        sub.flatMap((e) =>
          e.emails?.map((email) => email.trim().toLowerCase()),
        ),
      ),
    ].join(";");

    for (const n of data) {
      createCronJob(
        n.name,
        minutesToCron(parseInt(n.interval ?? "15", 10)),
        async () => {
          try {
            const { default: runFun } = await import(
              `./notification.${n.name.trim()}.js`
            );
            await runFun(n, emailString);
          } catch (error) {
            log.error(
              { error: error },
              "There was an error starting the notification",
            );
          }
        },
      );
    }
  }
};

export const modifiedNotification = async (id: string) => {
  // when a notification is subscribed to, updated, or deleted, re-read its info and rerun the startup for that single notification.
  const { data, error } = await tryCatch(
    db.select().from(notifications).where(eq(notifications.id, id)),
  );

  if (error) {
    log.error(
      { error: error },
      "There was an error when getting notifications.",
    );
    return;
  }

  if (data) {
    if (!data[0]?.active) {
      stopCronJob(data[0]?.name ?? "");
      return;
    }

    // get the subs for the specific id, as we only want to update the modified one
    const { data: sub, error: subError } = await tryCatch(
      db
        .select()
        .from(notificationSub)
        .where(eq(notificationSub.notificationId, id)),
    );

    if (subError) {
      log.error(
        { error: subError },
        "There was an error when getting subscriptions.",
      );

      return;
    }

    if (sub.length === 0) {
      log.info({}, "There are no currently active subscriptions.");
      stopCronJob(data[0]?.name ?? "");
      return;
    }

    const emailString = [
      ...new Set(
        sub.flatMap((e) =>
          e.emails?.map((email) => email.trim().toLowerCase()),
        ),
      ),
    ].join(";");

    createCronJob(
      data[0].name,
      minutesToCron(parseInt(data[0].interval ?? "15", 10)),
      async () => {
        try {
          const { default: runFun } = await import(
            `./notification.${data[0]?.name.trim()}.js`
          );
          await runFun(data[0], emailString);
        } catch (error) {
          log.error(
            { error: error },
            "There was an error starting the notification",
          );
        }
      },
    );
  }
};
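`minutesToCron` is not part of this diff, so its exact output is an assumption from the name and call sites; for intervals under an hour it presumably produces a step expression like the sketch below. The scheduler then dynamically imports `./notification.<name>.js` and runs its default export on that cadence.

    // Assumed shape of minutesToCron; only sensible for intervals under 60 minutes.
    const minutesToCron = (minutes: number): string => `*/${minutes} * * * *`;

    minutesToCron(10); // "*/10 * * * *" -> run every 10 minutes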
115
backend/notification/notification.reprintLabels.ts
Normal file
@@ -0,0 +1,115 @@
import { eq } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { notifications } from "../db/schema/notifications.schema.js";
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
import {
  type SqlQuery,
  sqlQuerySelector,
} from "../prodSql/prodSqlQuerySelector.utils.js";
import { returnFunc } from "../utils/returnHelper.utils.js";
import { sendEmail } from "../utils/sendEmail.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";

const reprint = async (data: any, emails: string) => {
  // re-read the notification row, since its options can be updated between intervals if no one touches it
  const { data: l, error: le } = (await tryCatch(
    db.select().from(notifications).where(eq(notifications.id, data.id)),
  )) as any;

  if (le) {
    return returnFunc({
      success: false,
      level: "error",
      module: "notification",
      subModule: "query",
      message: `${data.name} encountered an error while trying to get initial info`,
      data: [le],
      notify: true,
    });
  }

  // search the query db for the query by name
  const sqlQuery = sqlQuerySelector(`${data.name}`) as SqlQuery;
  // build the list of audit log ids to ignore
  const ignoreIds = l[0].options[0]?.auditId
    ? `${l[0].options[0]?.auditId}`
    : "0";

  // run the check
  const { data: queryRun, error } = await tryCatch(
    prodQuery(
      sqlQuery.query
        .replace("[intervalCheck]", l[0].interval)
        .replace("[ignoreList]", ignoreIds),
      `Running notification query: ${l[0].name}`,
    ),
  );

  if (error) {
    return returnFunc({
      success: false,
      level: "error",
      module: "notification",
      subModule: "query",
      message: `There was an error getting the data for: ${l[0].name}`,
      data: [error],
      notify: true,
    });
  }

  if (queryRun.data.length > 0) {
    // update the latest audit id
    const { error: dbe } = await tryCatch(
      db
        .update(notifications)
        .set({ options: [{ auditId: `${queryRun.data[0].id}` }] })
        .where(eq(notifications.id, data.id)),
    );

    if (dbe) {
      return returnFunc({
        success: false,
        level: "error",
        module: "notification",
        subModule: "query",
        message: `There was an error updating the audit id for: ${l[0].name}`,
        data: [dbe],
        notify: true,
      });
    }

    // send the email

    const sentEmail = await sendEmail({
      email: emails,
      subject: "Alert! Label Reprinted",
      template: "reprintLabels",
      context: {
        items: queryRun.data,
      },
    });

    if (!sentEmail?.success) {
      // sendEmail({
      //   email: "Blake.matths@alpla.com",
      //   subject: `${os.hostname()} failed to run ${data[0]?.name}.`,
      //   template: "serverCrash",
      //   context: {
      //     error: sentEmail?.data,
      //     plant: `${os.hostname()}`,
      //   },
      // });
      return returnFunc({
        success: false,
        level: "error",
        module: "notification",
        subModule: "email",
        message: `${l[0].name} failed to send the email`,
        data: [sentEmail?.data],
        notify: true,
      });
    }
  }
};

export default reprint;
55
backend/notification/notification.route.ts
Normal file
@@ -0,0 +1,55 @@
import { eq } from "drizzle-orm";
import { type Response, Router } from "express";

import { db } from "../db/db.controller.js";
import { notifications } from "../db/schema/notifications.schema.js";
import { auth } from "../utils/auth.utils.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";

const r = Router();

r.get("/", async (req, res: Response) => {
  const hasPermissions = await auth.api.userHasPermission({
    body: {
      //userId: req?.user?.id,
      role: req.user?.roles as any,
      permissions: {
        notifications: ["readAll"], // this must match the structure in the access control
      },
    },
  });

  const { data: nName, error: nError } = await tryCatch(
    db
      .select()
      .from(notifications)
      .where(
        !hasPermissions.success ? eq(notifications.active, true) : undefined,
      )
      .orderBy(notifications.name),
  );

  if (nError) {
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "notification",
      subModule: "get",
      message: `There was an error getting the notifications`,
      data: [nError],
      status: 400,
    });
  }

  return apiReturn(res, {
    success: true,
    level: "info",
    module: "notification",
    subModule: "get",
    message: `All current notifications`,
    data: nName ?? [],
    status: 200,
  });
});
export default r;
20
backend/notification/notification.routes.ts
Normal file
@@ -0,0 +1,20 @@
import type { Express } from "express";
import { requireAuth } from "../middleware/auth.middleware.js";
import getNotifications from "./notification.route.js";
import updateNote from "./notification.update.route.js";
import deleteSub from "./notificationSub.delete.route.js";
import subs from "./notificationSub.get.route.js";
import newSub from "./notificationSub.post.route.js";
import updateSub from "./notificationSub.update.route.js";

export const setupNotificationRoutes = (baseUrl: string, app: Express) => {
  // these paths stay as-is since we don't need to change them
  app.use(`${baseUrl}/api/notification`, requireAuth, getNotifications);
  app.use(`${baseUrl}/api/notification`, requireAuth, updateNote);
  app.use(`${baseUrl}/api/notification/sub`, requireAuth, subs);
  app.use(`${baseUrl}/api/notification/sub`, requireAuth, newSub);
  app.use(`${baseUrl}/api/notification/sub`, requireAuth, updateSub);
  app.use(`${baseUrl}/api/notification/sub`, requireAuth, deleteSub);

  // all other system routes should be under /api/system/*
};
81
backend/notification/notification.update.route.ts
Normal file
@@ -0,0 +1,81 @@
import { eq } from "drizzle-orm";
import { type Response, Router } from "express";
import z from "zod";
import { db } from "../db/db.controller.js";
import { notifications } from "../db/schema/notifications.schema.js";
import { requirePermission } from "../middleware/auth.requiredPerms.middleware.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
import { modifiedNotification } from "./notification.controller.js";

const r = Router();

const updateNote = z.object({
  description: z.string().optional(),
  active: z.boolean().optional(),
  interval: z.string().optional(),
  options: z.array(z.record(z.string(), z.unknown())).optional(),
});

r.patch(
  "/:id",
  requirePermission({ notifications: ["update"] }),
  async (req, res: Response) => {
    const { id } = req.params;

    try {
      const validated = updateNote.parse(req.body);

      const { data: nName, error: nError } = await tryCatch(
        db
          .update(notifications)
          .set(validated)
          .where(eq(notifications.id, id as string))
          .returning(),
      );

      await modifiedNotification(id as string);

      if (nError) {
        return apiReturn(res, {
          success: false,
          level: "error",
          module: "notification",
          subModule: "update",
          message: `There was an error updating the notification`,
          data: [nError],
          status: 400,
        });
      }

      return apiReturn(res, {
        success: true,
        level: "info",
        module: "notification",
        subModule: "update",
        message: `Notification was updated`,
        data: nName ?? [],
        status: 200,
      });
    } catch (err) {
      if (err instanceof z.ZodError) {
        const flattened = z.flattenError(err);

        return apiReturn(res, {
          success: false,
          level: "error",
          module: "routes",
          subModule: "notification",
          message: "Validation failed",
          data: [flattened.fieldErrors],
          status: 400,
        });
      }

      // non-Zod errors would otherwise leave the request hanging
      return apiReturn(res, {
        success: false,
        level: "error",
        module: "routes",
        subModule: "notification",
        message: "Internal error",
        data: [],
        status: 500,
      });
    }
  },
);
export default r;
103
backend/notification/notificationSub.delete.route.ts
Normal file
@@ -0,0 +1,103 @@
import { and, eq } from "drizzle-orm";
import { type Response, Router } from "express";
import z from "zod";
import { db } from "../db/db.controller.js";
import { notificationSub } from "../db/schema/notifications.sub.schema.js";
import { auth } from "../utils/auth.utils.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
import { modifiedNotification } from "./notification.controller.js";

const newSubscribe = z.object({
  userId: z.string().describe("User id."),
  notificationId: z.string().describe("Notification id"),
});

const r = Router();

r.delete("/", async (req, res: Response) => {
  const hasPermissions = await auth.api.userHasPermission({
    body: {
      //userId: req?.user?.id,
      role: req.user?.roles as any,
      permissions: {
        notifications: ["readAll"], // this must match the structure in the access control
      },
    },
  });

  try {
    const validated = newSubscribe.parse(req.body);

    const { data, error } = await tryCatch(
      db
        .delete(notificationSub)
        .where(
          and(
            eq(
              notificationSub.userId,
              hasPermissions.success ? validated.userId : (req?.user?.id ?? ""),
            ), // allows an admin to delete any user's subscription
            eq(notificationSub.notificationId, validated.notificationId),
          ),
        )
        .returning(),
    );

    await modifiedNotification(validated.notificationId);

    if (error) {
      return apiReturn(res, {
        success: false,
        level: "error",
        module: "notification",
        subModule: "delete",
        message: `There was an error deleting the subscription`,
        data: [error],
        status: 400,
      });
    }

    if (data.length <= 0) {
      return apiReturn(res, {
        success: false,
        level: "info",
        module: "notification",
        subModule: "delete",
        message: `Subscription was not deleted; invalid data sent over`,
        data: data ?? [],
        status: 200,
      });
    }

    return apiReturn(res, {
      success: true,
      level: "info",
      module: "notification",
      subModule: "delete",
      message: `Subscription deleted`,
      data: data ?? [],
      status: 200,
    });
  } catch (err) {
    if (err instanceof z.ZodError) {
      const flattened = z.flattenError(err);

      return apiReturn(res, {
        success: false,
        level: "error",
        module: "routes",
        subModule: "notification",
        message: "Validation failed",
        data: [flattened.fieldErrors],
        status: 400,
      });
    }

    // non-Zod errors would otherwise leave the request hanging
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "routes",
      subModule: "notification",
      message: "Internal error",
      data: [],
      status: 500,
    });
  }
});
export default r;
61
backend/notification/notificationSub.get.route.ts
Normal file
@@ -0,0 +1,61 @@
import { eq } from "drizzle-orm";
import { type Response, Router } from "express";
import { db } from "../db/db.controller.js";
import { notificationSub } from "../db/schema/notifications.sub.schema.js";
import { auth } from "../utils/auth.utils.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";

const r = Router();

r.get("/", async (req, res: Response) => {
  const { userId } = req.query;

  const hasPermissions = await auth.api.userHasPermission({
    body: {
      //userId: req?.user?.id,
      role: req.user?.roles as any,
      permissions: {
        notifications: ["readAll"], // this must match the structure in the access control
      },
    },
  });

  // if a specific userId was requested, drop to the caller's own subscriptions
  if (userId) {
    hasPermissions.success = false;
  }

  const { data, error } = await tryCatch(
    db
      .select()
      .from(notificationSub)
      .where(
        !hasPermissions.success
          ? eq(notificationSub.userId, `${req?.user?.id ?? ""}`)
          : undefined,
      ),
  );

  if (error) {
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "notification",
      subModule: "get",
      message: `There was an error getting subscriptions`,
      data: [error],
      status: 400,
    });
  }

  return apiReturn(res, {
    success: true,
    level: "info",
    module: "notification",
    subModule: "get",
    message: `Subscriptions`,
    data: data ?? [],
    status: 200,
  });
});
export default r;
92
backend/notification/notificationSub.post.route.ts
Normal file
@@ -0,0 +1,92 @@
import { type Response, Router } from "express";
import z from "zod";
import { db } from "../db/db.controller.js";
import { notificationSub } from "../db/schema/notifications.sub.schema.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
import { modifiedNotification } from "./notification.controller.js";

const newSubscribe = z.object({
  emails: z.email().array().describe("An array of emails"),
  userId: z.string().describe("User id."),
  notificationId: z.string().describe("Notification id"),
});

const r = Router();

r.post("/", async (req, res: Response) => {
  try {
    const validated = newSubscribe.parse(req.body);

    const emails = validated.emails
      .map((e) => e.trim().toLowerCase())
      .filter(Boolean);

    const uniqueEmails = [...new Set(emails)];

    const { data, error } = await tryCatch(
      db
        .insert(notificationSub)
        .values({
          userId: req?.user?.id ?? "",
          notificationId: validated.notificationId,
          emails: uniqueEmails,
        })
        .onConflictDoUpdate({
          target: [notificationSub.userId, notificationSub.notificationId],
          set: { emails: uniqueEmails },
        })
        .returning(),
    );

    await modifiedNotification(validated.notificationId);

    if (error) {
      return apiReturn(res, {
        success: false,
        level: "error",
        module: "notification",
        subModule: "post",
        message: `There was an error creating the subscription`,
        data: [error],
        status: 400,
      });
    }

    return apiReturn(res, {
      success: true,
      level: "info",
      module: "notification",
      subModule: "post",
      message: `Subscribed to notification`,
      data: data ?? [],
      status: 200,
    });
  } catch (err) {
    if (err instanceof z.ZodError) {
      const flattened = z.flattenError(err);

      return apiReturn(res, {
        success: false,
        level: "error",
        module: "routes",
        subModule: "notification",
        message: "Validation failed",
        data: [flattened.fieldErrors],
        status: 400,
      });
    }

    // non-Zod errors would otherwise leave the request hanging
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "routes",
      subModule: "notification",
      message: "Internal error",
      data: [],
      status: 500,
    });
  }
});
export default r;
84
backend/notification/notificationSub.update.route.ts
Normal file
@@ -0,0 +1,84 @@
import { and, eq } from "drizzle-orm";
import { type Response, Router } from "express";
import z from "zod";
import { db } from "../db/db.controller.js";
import { notificationSub } from "../db/schema/notifications.sub.schema.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
import { modifiedNotification } from "./notification.controller.js";

const newSubscribe = z.object({
  emails: z.email().array().describe("An array of emails"),
  userId: z.string().describe("User id."),
  notificationId: z.string().describe("Notification id"),
});

const r = Router();

r.patch("/", async (req, res: Response) => {
  try {
    const validated = newSubscribe.parse(req.body);

    const emails = validated.emails
      .map((e) => e.trim().toLowerCase())
      .filter(Boolean);

    const uniqueEmails = [...new Set(emails)];

    const { data, error } = await tryCatch(
      db
        .update(notificationSub)
        .set({ emails: uniqueEmails })
        .where(
          and(
            eq(notificationSub.userId, validated.userId),
            eq(notificationSub.notificationId, validated.notificationId),
          ),
        )
        .returning(),
    );

    await modifiedNotification(validated.notificationId);

    if (error) {
      return apiReturn(res, {
        success: false,
        level: "error",
        module: "notification",
        subModule: "update",
        message: `There was an error updating the subscription`,
        data: [error],
        status: 400,
      });
    }

    return apiReturn(res, {
      success: true,
      level: "info",
      module: "notification",
      subModule: "update",
      message: `Subscription updated`,
      data: data ?? [],
      status: 200,
    });
  } catch (err) {
    if (err instanceof z.ZodError) {
      const flattened = z.flattenError(err);

      return apiReturn(res, {
        success: false,
        level: "error",
        module: "routes",
        subModule: "notification",
        message: "Validation failed",
        data: [flattened.fieldErrors],
        status: 400,
      });
    }

    // non-Zod errors would otherwise leave the request hanging
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "routes",
      subModule: "notification",
      message: "Internal error",
      data: [],
      status: 500,
    });
  }
});
export default r;
70
backend/notification/notifications.master.ts
Normal file
@@ -0,0 +1,70 @@
import { sql } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import {
  type NewNotification,
  notifications,
} from "../db/schema/notifications.schema.js";
import { createLogger } from "../logger/logger.controller.js";
import { tryCatch } from "../utils/trycatch.utils.js";

const note: NewNotification[] = [
  {
    name: "reprintLabels",
    description:
      "Monitors the labels that are printed and returns their data if one falls within the time frame.",
    active: false,
    interval: "10",
    options: [{ auditId: [0] }],
  },
  {
    name: "qualityBlocking",
    description:
      "Checks for new blocking orders that have been entered; it is recommended to get the most recent order in here before activating.",
    active: false,
    interval: "10",
    options: [{ sentBlockingOrders: [{ timeStamp: "0", blockingOrder: 1 }] }],
  },
  {
    name: "alplaPurchaseHistory",
    description:
      "Checks the alpla purchase data for any changes; if the req has not already been sent then we send it, while POs and fresh orders are ignored.",
    active: false,
    interval: "5",
    options: [
      { sentReqs: [{ timeStamp: "0", req: 1, approved: false }] },
      { sentAPOs: [{ timeStamp: "0", apo: 1 }] },
      { sentRCT: [{ timeStamp: "0", rct: 1 }] },
    ],
  },
];

export const createNotifications = async () => {
  const log = createLogger({ module: "notifications", subModule: "create" });
  const { data, error } = await tryCatch(
    db
      .insert(notifications)
      .values(note)
      .onConflictDoUpdate({
        target: notifications.name,
        set: {
          description: sql`excluded.description`,
        },
        // where: sql`
        //   settings.seed_version IS NULL
        //   OR settings.seed_version < excluded.seed_version
        // `,
      })
      .returning(),
  );

  if (error) {
    log.error(
      { error: error },
      "There was an error when adding or updating the notifications.",
    );
  }

  if (data) {
    log.info({}, "All notifications were added/updated");
  }
};
36
backend/ocp/ocp.printer.listener.ts
Normal file
@@ -0,0 +1,36 @@
/**
 * The route that listens for the printers' HTTP POSTs.
 *
 * An http-post alert should be set up on each printer pointing here. At a minimum you will
 * want to make the alert for "pause printer"; you can send all alerts here, as it will
 * also monitor and act on all messages.
 *
 * http://{serverIP}:2222/lst/api/ocp/printer/listener/{printerName}
 *
 * The messages are sent over to the db for logging, and specific ones trigger an action:
 *
 * pause will validate whether it can print
 * close head will re-pause the printer so it won't print a label
 * power up will just re-pause the printer so it won't print a label
 */

import { Router } from "express";
import { apiReturn } from "../utils/returnHelper.utils.js";

const r = Router();

r.post("/printer/listener/:printer", async (req, res) => {
  const { printer: printerName } = req.params;
  console.log(req.body);

  return apiReturn(res, {
    success: true,
    level: "info",
    module: "ocp",
    subModule: "printing",
    message: `${printerName} just passed over a message`,
    data: req.body ?? [],
    status: 200,
  });
});

export default r;
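For testing, a POST to the listener might look like the sketch below (the host, printer name, and payload fields are placeholders; printers define their own alert body format):

    // Hypothetical test request against the listener endpoint (Node 18+ fetch).
    await fetch("http://serverIP:2222/lst/api/ocp/printer/listener/printer01", {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({ alert: "pause printer" }), // example alert name
    });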
19
backend/ocp/ocp.printer.manage.ts
Normal file
@@ -0,0 +1,19 @@
/**
 * This will do a prod sync, update or add alerts to the printer, and validate the next pm interval as well as head replacement.
 *
 * If a printer has an upcoming pm or head replacement, send it to the plant to address.
 *
 * A trigger on the printer table will have the ability to run this as well.
 *
 * Heartbeats on all assigned printers.
 *
 * Printer status will live here; this is how we manage all the status levels like 3 paused, 1 printing, 8 error, 10 power up, etc...
 */

export const printerManager = async () => {};

export const printerHeartBeat = async () => {
  // heartbeats are defaulted to 60 seconds, no reason to allow anything else
};

//export const printerStatus = async (statusNr: number, printerId: number) => {};
22
backend/ocp/ocp.routes.ts
Normal file
@@ -0,0 +1,22 @@
import { type Express, Router } from "express";
import { requireAuth } from "../middleware/auth.middleware.js";
import { featureCheck } from "../middleware/featureActive.middleware.js";
import listener from "./ocp.printer.listener.js";

export const setupOCPRoutes = (baseUrl: string, app: Express) => {
  // set up all the routes
  const router = Router();

  // is the feature even on?
  router.use(featureCheck("ocp"));

  // non-auth routes up here
  router.use(listener);

  // auth routes below here
  router.use(requireAuth);

  //router.use("");

  app.use(`${baseUrl}/api/ocp`, router);
};
393
backend/opendock/openDockRreleaseMonitor.utils.ts
Normal file
393
backend/opendock/openDockRreleaseMonitor.utils.ts
Normal file
@@ -0,0 +1,393 @@
|
||||
import axios from "axios";
|
||||
import { addHours } from "date-fns";
|
||||
import { formatInTimeZone } from "date-fns-tz";
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../db/db.controller.js";
|
||||
import { opendockApt } from "../db/schema/opendock.schema.js";
|
||||
import { settings } from "../db/schema/settings.schema.js";
|
||||
import { createLogger } from "../logger/logger.controller.js";
|
||||
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
|
||||
import {
|
||||
type SqlQuery,
|
||||
sqlQuerySelector,
|
||||
} from "../prodSql/prodSqlQuerySelector.utils.js";
|
||||
import { createCronJob } from "../utils/croner.utils.js";
|
||||
import { delay } from "../utils/delay.utils.js";
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||
import { getToken, odToken } from "./opendock.utils.js";
|
||||
|
||||
type Releases = {
|
||||
ReleaseNumber: number;
|
||||
DeliveryState: number;
|
||||
DeliveryDate: Date;
|
||||
LineItemHumanReadableId: number;
|
||||
ArticleAlias: string;
|
||||
LoadingUnits: string;
|
||||
Quantity: number;
|
||||
LineItemArticleWeight: number;
|
||||
CustomerReleaseNumber: string;
|
||||
};
|
||||
const timeZone = process.env.TIMEZONE as string;
|
||||
const TWENTY_FOUR_HOURS = 24 * 60 * 60 * 1000;
|
||||
const log = createLogger({ module: "opendock", subModule: "releaseMonitor" });
|
||||
|
||||
// making the cron more safe when it comes to buffer stuff
|
||||
let opendockSyncRunning = false;
|
||||
|
||||
let lastCheck = formatInTimeZone(
|
||||
new Date().toISOString(),
|
||||
timeZone,
|
||||
"yyyy-MM-dd HH:mm:ss",
|
||||
);
|
||||
|
||||
// const lastCheck = formatInTimeZone(
|
||||
// new Date().toISOString(),
|
||||
// `America/New_York`, //TODO: Pull timezone from the .env last as process.env.TIME_ZONE is not working so need to figure itout
|
||||
// "yyyy-MM-dd HH:mm:ss",
|
||||
// );
|
||||
|
||||
//const queue: unknown[] = [];
|
||||
//const isProcessing: boolean = false;
|
||||
|
||||
// const parseDbDate = (value: string | Date) => {
|
||||
// if (value instanceof Date) return value;
|
||||
|
||||
// // normalize "2026-04-08 13:10:43.280" -> "2026-04-08T13:10:43.280"
|
||||
// const normalized = value.replace(" ", "T");
|
||||
|
||||
// // interpret that wall-clock time as America/New_York
|
||||
// return fromZonedTime(normalized, timeZone);
|
||||
// };
|
||||
|
||||
const postRelease = async (release: Releases) => {
  if (!odToken.odToken) {
    log.info({}, "Getting Auth Token");
    await getToken();
  }

  if (
    new Date(odToken.tokenDate || Date.now()).getTime() <
    Date.now() - TWENTY_FOUR_HOURS
  ) {
    log.info({}, "Refreshing Auth Token");
    await getToken();
  }
  /**
   * ReleaseState
   * 0 = open
   * 1 = planned
   * 2 = customer canceled
   * 4 = internally canceled
   */

  /**
   * DeliveryState
   * 0 = open
   * 1 = in progress
   * 2 = loading
   * 3 = partly shipped
   * 4 = delivered
   */

  const newDockApt = {
    status:
      release.DeliveryState === 0 || release.DeliveryState === 1
        ? "Scheduled"
        : release.DeliveryState === 2
          ? "InProgress"
          : "Completed", // DeliveryState 3 and 4 both count as finished; if a correction needs to be made to the BOL we need to cancel and reactivate the order
    userId: process.env.DEFAULT_CARRIER, // this should be the carrier id
    loadTypeId: process.env.DEFAULT_LOAD_TYPE, // we'll get this and make it a default one
    dockId: process.env.DEFAULT_DOCK, // this is the warehouse we want it in to start out
    refNumbers: [release.ReleaseNumber],
    //refNumber: release.ReleaseNumber,
    start: release.DeliveryDate,
    end: addHours(release.DeliveryDate, 1),
    notes: "",
    ccEmails: [""],
    muteNotifications: true,
    metadata: {
      externalValidationFailed: false,
      externalValidationErrorMessage: null,
    },
    units: null,
    customFields: [
      {
        name: "strArticle",
        type: "str",
        label: "Article",
        value: `${release.LineItemHumanReadableId} - ${release.ArticleAlias}`,
        description: "What bottle are we sending",
        placeholder: "",
        dropDownValues: [],
        minLengthOrValue: 1,
        hiddenFromCarrier: false,
        requiredForCarrier: false,
        requiredForWarehouse: false,
      },
      {
        name: "intPallet Count",
        type: "int",
        label: "Pallet Count",
        value: parseInt(release.LoadingUnits, 10), // do we really want to update this on a partial load, since it should have been the full amount?
        description: "How many pallets",
        placeholder: "22",
        dropDownValues: [],
        minLengthOrValue: 1,
        hiddenFromCarrier: false,
        requiredForCarrier: false,
        requiredForWarehouse: false,
      },
      {
        name: "strTotal Weight",
        type: "str",
        label: "Total Weight",
        value: `${(((release.Quantity * release.LineItemArticleWeight) / 1000) * 2.20462).toFixed(2)}`, // grams -> kg, then kg -> lb
        description: "What is the total weight of the load",
        placeholder: "",
        dropDownValues: [],
        minLengthOrValue: 1,
        hiddenFromCarrier: false,
        requiredForCarrier: false,
        requiredForWarehouse: false,
      },
      {
        name: "strCustomer ReleaseNumber",
        type: "str",
        label: "Customer Release Number",
        value: `${release.CustomerReleaseNumber}`,
        description: "What is the customer release number",
        placeholder: "",
        dropDownValues: [],
        minLengthOrValue: 1,
        hiddenFromCarrier: false,
        requiredForCarrier: false,
        requiredForWarehouse: false,
      },
    ],
  };
  // TODO: pull the already-added releases from the db; if one matches, grab its id and run an update instead of a create
  const { data: existingApt, error: aptError } = await tryCatch(
    db
      .select()
      .from(opendockApt)
      .where(eq(opendockApt.release, release.ReleaseNumber))
      .limit(1),
  );
  if (aptError) {
    log.error({ error: aptError }, "Error getting apt data");
    // TODO: send an error email on this one as it will cause issues
    return;
  }

  const existing = existingApt[0];

  if (existing) {
    const id = existing.openDockAptId;
    try {
      const response = await axios.patch(
        `${process.env.OPENDOCK_URL}/appointment/${id}`,
        newDockApt,
        {
          headers: {
            "content-type": "application/json; charset=utf-8",
            Authorization: `Bearer ${odToken.odToken}`,
          },
        },
      );

      // note: axios rejects on 4xx by default, so this branch only runs if validateStatus is customized
      if (response.status === 400) {
        log.error({}, response.data.data.message);
        return;
      }

      // update the release in the db; leaving this as an insert with conflict handling just in case something weird happened
      try {
        await db
          .insert(opendockApt)
          .values({
            release: release.ReleaseNumber,
            openDockAptId: response.data.data.id,
            appointment: response.data.data,
          })
          .onConflictDoUpdate({
            target: opendockApt.release,
            set: {
              openDockAptId: response.data.data.id,
              appointment: response.data.data,
              upd_date: sql`NOW()`,
            },
          })
          .returning();

        log.info({}, `${release.ReleaseNumber} was updated`);
      } catch (e) {
        log.error(
          { error: e },
          `Error updating the release: ${release.ReleaseNumber}`,
        );
      }
      // biome-ignore lint/suspicious/noExplicitAny: too many possibilities
    } catch (e: any) {
      log.error(
        { error: e?.response?.data },
        `An error occurred while patching the release: ${release.ReleaseNumber}`,
      );

      return;
    }
  } else {
    try {
      const response = await axios.post(
        `${process.env.OPENDOCK_URL}/appointment`,
        newDockApt,
        {
          headers: {
            "content-type": "application/json; charset=utf-8",
            Authorization: `Bearer ${odToken.odToken}`,
          },
        },
      );

      // we need the id, release number, and status from this response; store it in lst and check whether we already have the release so we can just update it.
      // this will be used when we are listening for changes to the apts, so we can update the state to arrived; we run our own checks during the incoming messages.

      if (response.status === 400) {
        log.error({}, response.data.data.message);
        return;
      }

      // to keep it simple we want response.data.id and response.data.relNumber; status defaults to Scheduled when we created it here.
      // TODO: add this release data to our db, but save it in json format and parse it out later; that way we future-proof it and have everything in here instead of just a few fields
      try {
        await db
          .insert(opendockApt)
          .values({
            release: release.ReleaseNumber,
            openDockAptId: response.data.data.id,
            appointment: response.data.data,
          })
          .onConflictDoUpdate({
            target: opendockApt.release,
            set: {
              openDockAptId: response.data.data.id,
              appointment: response.data.data,
              upd_date: sql`NOW()`,
            },
          })
          .returning();

        log.info({}, `${release.ReleaseNumber} was created`);
      } catch (e) {
        log.error({ error: e }, "Error creating new release");
      }
      // biome-ignore lint/suspicious/noExplicitAny: too many possibilities
    } catch (e: any) {
      log.error(
        { error: e?.response?.data },
        "Error posting new release to opendock",
      );

      return;
    }
  }

  await delay(750); // rate limit protection
};
export const monitorReleaseChanges = async () => {
  // TODO: validate whether the opendock setting is active and start/stop the system based on it;
  // if it changes we set the flag to false and the next loop will stop.

  const openDockMonitor = await db
    .select()
    .from(settings)
    .where(eq(settings.name, "opendock_sync"));
  // console.info("Starting release monitor", lastCheck);

  const sqlQuery = sqlQuerySelector(`releaseChecks`) as SqlQuery;

  if (!sqlQuery.success) {
    return returnFunc({
      success: false,
      level: "error",
      module: "datamart",
      subModule: "query",
      message: `Error getting releaseChecks info`,
      data: [sqlQuery.message],
      notify: false,
    });
  }

  if (openDockMonitor[0]?.active) {
    // const BUFFER_MS =
    //   Math.floor(parseInt(openDockMonitor[0]?.value, 10) || 30) * 1.5 * 1000; // this should be >= the interval we set in the cron. TODO: pull the buffer from the setting, give it an extra 10%, then round to the nearest int.

    createCronJob(
      "opendock_sync",
      `*/${parseInt(openDockMonitor[0]?.value, 10) || 30} * * * * *`,
      async () => {
        if (opendockSyncRunning) {
          log.warn(
            {},
            "Skipping opendock_sync because previous run is still active",
          );
          return;
        }

        opendockSyncRunning = true;
        try {
          // set this to the latest time.
          const result = await prodQuery(
            sqlQuery.query.replace("[dateCheck]", `'${lastCheck}'`),
            "Get release info",
          );

          log.debug(
            { lastCheck },
            `${result.data.length} changes to a release have been made`,
          );

          if (result.data.length) {
            for (const release of result.data) {
              await postRelease(release);

              // add 2 seconds to account for a massive influx of orders; if we don't finish in one pass it won't try to grab the same batch again
              const nDate = new Date(release.Upd_Date);
              nDate.setSeconds(nDate.getSeconds() + 2);

              lastCheck = formatInTimeZone(
                nDate.toISOString(),
                "UTC",
                "yyyy-MM-dd HH:mm:ss",
              );
              log.debug({ lastCheck }, "Changes to a release have been made");
              await delay(500);
            }
          }
        } catch (e) {
          log.error(
            { error: e },
            "Error occurred while running the monitor job",
          );
        } finally {
          opendockSyncRunning = false;
        }
      },
      "monitorReleaseChanges",
    );
  }
};
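For context, a minimal sketch of how this monitor might be kicked off at startup; the module path and the startBackgroundJobs name are assumptions, not part of this diff.

// hypothetical startup wiring (import path is an assumption)
import { monitorReleaseChanges } from "./opendock/opendockReleaseMonitor.utils.js";

const startBackgroundJobs = async () => {
  // registers the "opendock_sync" cron job only when the setting row is active
  await monitorReleaseChanges();
};

startBackgroundJobs().catch((e) => console.error("Startup failed", e));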
19
backend/opendock/opendock.routes.ts
Normal file
@@ -0,0 +1,19 @@
import { type Express, Router } from "express";
import { requireAuth } from "../middleware/auth.middleware.js";
import { featureCheck } from "../middleware/featureActive.middleware.js";
import getApt from "./opendockGetRelease.route.js";

export const setupOpendockRoutes = (baseUrl: string, app: Express) => {
  // set up all the routes and apply auth to the entire router
  const router = Router();

  // is the feature even on?
  router.use(featureCheck("opendock_sync"));

  // we need to make sure we are authenticated to see the releases
  router.use(requireAuth);

  router.use(getApt);
  app.use(`${baseUrl}/api/opendock`, router);
};
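As a usage note, this setup function is wired in by setupRoutes in routeHandler.routes.ts further down; a standalone sketch, where the baseUrl value is illustrative:

import express from "express";
import { setupOpendockRoutes } from "./opendock/opendock.routes.js";

const app = express();
setupOpendockRoutes("/lst", app); // endpoints are then served under /lst/api/opendock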
35
backend/opendock/opendock.utils.ts
Normal file
@@ -0,0 +1,35 @@
import axios from "axios";
import { createLogger } from "../logger/logger.controller.js";

type ODToken = {
  odToken: string | null;
  tokenDate: Date | null;
};

export let odToken: ODToken = {
  odToken: null,
  tokenDate: new Date(),
};

export const getToken = async () => {
  const log = createLogger({ module: "opendock", subModule: "releaseMonitor" });
  try {
    const { status, data } = await axios.post(
      `${process.env.OPENDOCK_URL}/auth/login`,
      {
        email: "blake.matthes@alpla.com",
        password: process.env.OPENDOCK_PASSWORD,
      },
    );

    // note: axios rejects on 4xx by default, so this only runs if validateStatus is customized
    if (status === 400) {
      log.error(data.message);
      return;
    }

    odToken = { odToken: data.access_token, tokenDate: new Date() };
    log.info({ odToken }, "Token added");
  } catch (e) {
    log.error({ error: e }, "Error getting/refreshing token");
  }
};
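The get-or-refresh dance around odToken is repeated in postRelease and the socket monitor; a minimal consolidation sketch, assuming the ODToken shape above (ensureFreshToken is a hypothetical helper name):

// hypothetical helper that wraps the repeated freshness check
const TWENTY_FOUR_HOURS = 24 * 60 * 60 * 1000;

export const ensureFreshToken = async () => {
  const expired =
    new Date(odToken.tokenDate || Date.now()).getTime() <
    Date.now() - TWENTY_FOUR_HOURS;

  if (!odToken.odToken || expired) {
    await getToken();
  }
};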
40
backend/opendock/opendockGetRelease.route.ts
Normal file
@@ -0,0 +1,40 @@
import { desc, gte, sql } from "drizzle-orm";
import { Router } from "express";
import { db } from "../db/db.controller.js";
import { opendockApt } from "../db/schema/opendock.schema.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";

const r = Router();

r.get("/", async (_, res) => {
  const daysCreated = 30;

  const { data } = await tryCatch(
    db
      .select()
      .from(opendockApt)
      .where(
        gte(
          opendockApt.createdAt,
          sql.raw(`NOW() - INTERVAL '${daysCreated} days'`),
        ),
      )
      .orderBy(desc(opendockApt.createdAt))
      .limit(500),
  );

  apiReturn(res, {
    success: true,
    level: "info",
    module: "opendock",
    subModule: "apt",
    message: `The first ${data?.length} apt(s) created in the last ${daysCreated} days`,
    data: data ?? [],
    status: 200,
  });
});

export default r;
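For illustration, a client call against this endpoint; the host, port, and session handling are assumptions, since requireAuth guards the router:

// hypothetical client call; sessionCookie must come from a prior login
const res = await fetch("http://localhost:3100/lst/api/opendock", {
  headers: { cookie: sessionCookie },
});
const body = await res.json(); // { success, level, module, subModule, message, data, status }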
69
backend/opendock/opendockSocketMonitor.utils.ts
Normal file
@@ -0,0 +1,69 @@
import { io, type Socket } from "socket.io-client";
import { createLogger } from "../logger/logger.controller.js";
import { systemSettings } from "../server.js";
import { getToken, odToken } from "./opendock.utils.js";

const log = createLogger({ module: "opendock", subModule: "releaseMonitor" });
const TWENTY_FOUR_HOURS = 24 * 60 * 60 * 1000;
let socket: Socket | null = null;
export const opendockSocketMonitor = async () => {
  // check whether we actually want to run this
  if (!systemSettings.filter((n) => n.name === "opendock_sync")[0]?.active) {
    log.info({}, "Opendock is not active");
    return;
  }

  if (!odToken.odToken) {
    log.info({}, "Getting Auth Token");
    await getToken();
  }

  if (
    new Date(odToken.tokenDate || Date.now()).getTime() <
    Date.now() - TWENTY_FOUR_HOURS
  ) {
    log.info({}, "Refreshing Auth Token");
    await getToken();
  }
  const baseSubspaceUrl = "wss://subspace.staging.opendock.com";
  const url = `${baseSubspaceUrl}?token=${odToken.odToken}`;
  socket = io(url, { transports: ["websocket"] }); // enforce the 'websocket' transport only

  socket.on("connect", () => {
    console.log("Connected");
  });

  // socket.on("heartbeat", (data) => {
  //   console.log(data);
  // });

  socket.on("create-Appointment", () => {
    //console.log("appt create:", data);
  });

  socket.on("update-Appointment", () => {
    //console.log("appt update:", data);
  });

  socket.on("error", (data) => {
    console.log("Error:", data);
  });

  // socket.onAny((event, ...args) => {
  //   console.log("Received event:", event, args);
  // });
};

export const killOpendockSocket = () => {
  if (!socket) {
    console.log("No active socket to kill");
    return;
  }

  console.log("🛑 Killing socket connection...");

  socket.removeAllListeners(); // optional but clean
  socket.disconnect();
  socket = null;

  console.log("✅ Socket killed");
};
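Because the auth token is baked into the connection URL, socket.io's built-in reconnect keeps retrying with a stale token; one possible refresh hook, sketched under that assumption (it would live inside opendockSocketMonitor):

// hypothetical: rebuild the connection with a fresh token on connection failures
socket.on("connect_error", async (err) => {
  log.error({ error: err.message }, "Subspace connect_error");
  await getToken(); // refresh the auth token
  killOpendockSocket(); // drop the stale connection
  await opendockSocketMonitor(); // reconnect with the new token in the URL
});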
17
backend/prodSql/prodSql.routes.ts
Normal file
@@ -0,0 +1,17 @@
import { type Express, Router } from "express";
import { requireAuth } from "../middleware/auth.middleware.js";
import restart from "./prodSqlRestart.route.js";
import start from "./prodSqlStart.route.js";
import stop from "./prodSqlStop.route.js";
export const setupProdSqlRoutes = (baseUrl: string, app: Express) => {
  // set up all the routes and apply auth to the entire router
  const router = Router();
  router.use(requireAuth);

  router.use(start);
  router.use(stop);
  router.use(restart);

  app.use(`${baseUrl}/api/system/prodSql`, router);
};
@@ -35,7 +35,8 @@ export const connectProdSql = async () => {
  // try to connect to the sql server
  try {
    pool = await sql.connect(prodSqlConfig);
    pool = new sql.ConnectionPool(prodSqlConfig);
    await pool.connect();
    connected = true;
    return returnFunc({
      success: true,
@@ -151,6 +152,6 @@ export const reconnectToSql = async () => {
  );

  reconnecting = false;
  // exit alert someone here
  // TODO: exit alert someone here
}
};
@@ -1,4 +1,4 @@
import { returnFunc } from "../utils/returnHelper.utils.js";
import {
  closePool,
  connected,
  pool,
  reconnecting,
@@ -59,12 +58,11 @@ export const prodQuery = async (queryToRun: string, name: string) => {
    return {
      success: true,
      message: `Query results for: ${name}`,
      data: result.recordset,
      data: result.recordset ?? [],
    };
  } catch (error: unknown) {
    const err = error as SqlError;
    if (err.code === "ETIMEOUT") {
      closePool();
      return returnFunc({
        success: false,
        module: "system",
@@ -77,7 +75,6 @@ export const prodQuery = async (queryToRun: string, name: string) => {
    }

    if (err.code === "EREQUEST") {
      closePool();
      return returnFunc({
        success: false,
        module: "system",
29
backend/prodSql/prodSqlQuerySelector.utils.ts
Normal file
@@ -0,0 +1,29 @@
import { readFileSync } from "node:fs";

export type SqlQuery = {
  query: string;
  success: boolean;
  message: string;
};

export const sqlQuerySelector = (name: string) => {
  try {
    const queryFile = readFileSync(
      new URL(`../prodSql/queries/${name}.sql`, import.meta.url),
      "utf8",
    );

    return {
      success: true,
      message: `Query for: ${name}`,
      query: queryFile,
    };
  } catch (e) {
    console.error(e);
    return {
      success: false,
      message:
        "Error getting the query file, please make sure you have the correct name.",
    };
  }
};
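Usage sketch: the .sql files under prodSql/queries use bracketed placeholders ([dateCheck], [interval], and so on) that callers substitute with String.replace before running, as monitorReleaseChanges does above; the date literal here is illustrative:

const q = sqlQuerySelector("releaseChecks") as SqlQuery;
if (q.success) {
  // single-occurrence replace; quoting the value is the caller's responsibility
  const runnable = q.query.replace("[dateCheck]", `'2026-01-01 00:00:00'`);
  // await prodQuery(runnable, "Get release info");
}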
208
backend/prodSql/queries/activeArticles.sql
Normal file
@@ -0,0 +1,208 @@
|
||||
use AlplaPROD_test1
|
||||
|
||||
SELECT V_Artikel.IdArtikelvarianten,
|
||||
V_Artikel.Bezeichnung,
|
||||
V_Artikel.ArtikelvariantenTypBez,
|
||||
V_Artikel.PreisEinheitBez,
|
||||
case when sales.price is null then 0 else sales.price end as salesPrice,
|
||||
TypeOfMaterial = CASE
    WHEN V_Artikel.ArtikelvariantenTypBez LIKE '%Additive' THEN 'AD'
    WHEN V_Artikel.ArtikelvariantenTypBez LIKE '%Masterbatch' THEN 'MB'
    WHEN V_Artikel.ArtikelvariantenTypBez IN (
        'Pallet', 'Top', 'Bags', 'Bag', 'Stretch Wrap', 'Stretch Film',
        'Banding Materials', 'Carton', 'Re-Shipper Box', 'Label',
        'Pallet Label', 'Carton Label', 'Liner', 'Dose Cup', 'Metal Cage',
        'Spout', 'Slip Sheet', 'Palet', 'LID', 'Metal', 'Corner post',
        'Bottle Label', 'Paper label', 'Banding', 'Glue', 'Top Frame',
        'IML Label', 'Purch EBM Bottle', 'Purchased Spout', 'Gaylord',
        'Misc. Packaging', 'Sleeve', 'Plastic Bag', 'Purch Spout', 'Seal',
        'Tape', 'Box', 'Label IML', 'Pallet Runner'
    ) THEN 'PKG'
    WHEN V_Artikel.ArtikelvariantenTypBez IN (
        'HD-PE', 'HD-PE PCR', 'HD-PP', 'PP', 'LDPE', 'HDPE',
        'PET', 'PET-P', 'PET-G'
    ) OR V_Artikel.ArtikelvariantenTypBez LIKE '%PCR' THEN 'MM'
    WHEN V_Artikel.ArtikelvariantenTypBez IN (
        'HDPE-Waste', '$Waste Container', 'Mixed-Waste'
    ) OR V_Artikel.ArtikelvariantenTypBez LIKE '%-Waste%' THEN 'Waste'
    WHEN V_Artikel.ArtikelvariantenTypBez IN (
        'Bottle', 'SBM Bottle', 'EBM Bottle', 'ISBM Bottle', 'Decorated Bottle'
    ) THEN 'Bottle'
    WHEN V_Artikel.ArtikelvariantenTypBez = 'Preform' THEN 'Preform'
    WHEN V_Artikel.ArtikelvariantenTypBez IN (
        'Purchased Preform', 'Purchased Caps', 'Purchased_preform'
    ) THEN 'Purchased_preform'
    WHEN V_Artikel.ArtikelvariantenTypBez IN ('Closures', 'Cap') THEN 'Caps'
    WHEN V_Artikel.ArtikelvariantenTypBez = 'Dummy' THEN 'Not used'
    ELSE 'Item not defined' END
|
||||
,V_Artikel.IdArtikelvariantenTyp,
|
||||
Round(V_Artikel.ArtikelGewicht, 3) as Article_Weight,
|
||||
IdAdresse,
|
||||
AdressBez,
|
||||
AdressTypBez,
|
||||
ProdBereichBez,
|
||||
FG = CASE WHEN V_Artikel.ProdBereichBez IN (
        'SBM', 'IM-Caps', 'IM-PET', 'PRINT OFFICE', 'EBM', 'ISBM', 'IM-Finishing'
    ) THEN 'FG'
    ELSE 'not Defined Profit Center'
    END,
|
||||
V_Artikel.Umlaeufe as num_of_cycles,
|
||||
V_FibuKonten_BASIS.FibuKontoNr as CostsCenterId,
|
||||
V_FibuKonten_BASIS.Bezeichnung as CostCenterDescription,
|
||||
sales.[KdArtNr] as CustomerArticleNumber,
|
||||
sales.[KdArtBez] as CustomerArticleDescription,
|
||||
round(V_Artikel.Zyklus, 2) as CycleTime,
|
||||
Sypronummer as salesAgreement,
|
||||
V_Artikel.ProdArtikelBez as ProductFamily
|
||||
--,REPLACE(pur.UOM,'UOM:','')
|
||||
,Case when LEFT(
|
||||
LTRIM(REPLACE(pur.UOM,'UOM:','')),
|
||||
CHARINDEX(' ', LTRIM(REPLACE(REPLACE(pur.UOM,'UOM:',''), CHAR(13)+CHAR(10), ' ')) + ' ') - 1
|
||||
) is null then '1' else LEFT(
|
||||
LTRIM(REPLACE(pur.UOM,'UOM:','')),
|
||||
CHARINDEX(' ', LTRIM(REPLACE(REPLACE(pur.UOM,'UOM:',''), CHAR(13)+CHAR(10), ' ')) + ' ') - 1
|
||||
) end AS UOM
|
||||
|
||||
--,*
|
||||
FROM dbo.V_Artikel (nolock)
|
||||
|
||||
join
|
||||
dbo.V_Artikelvarianten (nolock) on dbo.V_Artikel.IdArtikelvarianten =
|
||||
dbo.V_Artikelvarianten.IdArtikelvarianten
|
||||
|
||||
join
|
||||
dbo.V_FibuKonten_BASIS (nolock) on dbo.V_Artikelvarianten.IdFibuKonto =
|
||||
dbo.V_FibuKonten_BASIS.IdFibuKonto
|
||||
|
||||
|
||||
-- adding in the sales price
|
||||
left join
|
||||
(select * from
|
||||
(select
|
||||
ROW_NUMBER() OVER (PARTITION BY IdArtikelvarianten ORDER BY GueltigabDatum DESC) AS RN,
|
||||
IdArtikelvarianten as av
|
||||
,GueltigabDatum as validDate
|
||||
,VKPreis as price
|
||||
,[KdArtNr]
|
||||
,[KdArtBez]
|
||||
--,*
|
||||
from dbo.T_HistoryVK (nolock)
|
||||
where
|
||||
--GueltigabDatum > getDate() - 120
|
||||
--and
|
||||
Aktiv = 1
|
||||
and StandardKunde = 1 -- default address
|
||||
) a
|
||||
where RN = 1) as sales
|
||||
on dbo.V_Artikel.IdArtikelvarianten = sales.av
|
||||
|
||||
/* adding the purchase price info */
|
||||
left join
|
||||
(select * from
|
||||
(select
|
||||
ROW_NUMBER() OVER (PARTITION BY IdArtikelvarianten ORDER BY GueltigabDatum DESC) AS RN,
|
||||
IdArtikelvarianten as av
|
||||
,GueltigabDatum as validDate
|
||||
,EKPreis as price
|
||||
,LiefArtNr as supplierNr
|
||||
--,CASE
|
||||
-- WHEN Bemerkung IS NOT NULL AND Bemerkung LIKE '%UOM:%'
|
||||
-- THEN
|
||||
-- -- incase there is something funny going on in the remark well jsut check for new lines and what not
|
||||
-- LEFT(
|
||||
-- REPLACE(REPLACE(Bemerkung, CHAR(13)+CHAR(10), ' '), CHAR(10), ' '),
|
||||
-- CASE
|
||||
-- WHEN CHARINDEX(' ', REPLACE(REPLACE(Bemerkung, CHAR(13)+CHAR(10), ' '), CHAR(10), ' ')) > 0
|
||||
-- THEN CHARINDEX(' ', REPLACE(REPLACE(Bemerkung, CHAR(13)+CHAR(10), ' '), CHAR(10), ' ')) - 1
|
||||
-- ELSE LEN(Bemerkung)
|
||||
-- END
|
||||
-- )
|
||||
-- ELSE 'UOM:1'
|
||||
-- END AS UOM
|
||||
,CASE
|
||||
WHEN Bemerkung IS NOT NULL AND Bemerkung LIKE '%UOM:%'
|
||||
THEN
|
||||
LTRIM(
|
||||
SUBSTRING(
|
||||
Bemerkung,
|
||||
CHARINDEX('UOM:', UPPER(Bemerkung)) + LEN('UOM:'),
|
||||
LEN(Bemerkung)
|
||||
)
|
||||
)
|
||||
ELSE
|
||||
'UOM:1'
|
||||
END AS UOM
|
||||
,Bemerkung
|
||||
--,*
|
||||
from dbo.T_HistoryEK (nolock)
|
||||
where
|
||||
StandardLieferant = 1 -- default address
|
||||
) a
|
||||
where RN = 1) as pur
|
||||
on dbo.V_Artikel.IdArtikelvarianten = pur.av
|
||||
|
||||
where V_Artikel.aktiv = 1 --and dbo.V_Artikel.IdArtikelvarianten = 1445
|
||||
|
||||
order by V_Artikel.IdArtikelvarianten /*, TypeOfMaterial */
|
||||
63
backend/prodSql/queries/alplapurchase.sql
Normal file
@@ -0,0 +1,63 @@
|
||||
use AlplaPROD_test1
|
||||
declare @intervalCheck as int = '[interval]'
|
||||
|
||||
/*
|
||||
Monitors alpla purchase for new entries. This will not update unless the order status is updated.
|
||||
This means if a user just reopens the order it will update, but changes to the positions will not be updated until the user reorders or cancels the PO.
|
||||
*/
|
||||
|
||||
select
|
||||
IdBestellung as apo
|
||||
,po.revision as revision
|
||||
,po.Bestaetigt as confirmed
|
||||
,po.status
|
||||
,case po.Status
|
||||
when 1 then 'Created'
|
||||
when 2 then 'Ordered'
|
||||
when 22 then 'Reopened'
|
||||
when 11 then 'Reopened'
|
||||
when 4 then 'Planned'
|
||||
when 5 then 'Partly Delivered'
|
||||
when 6 then 'Delivered'
|
||||
when 7 then 'Canceled'
|
||||
when 8 then 'Closed'
|
||||
else 'Unknown' end as statusText
|
||||
,po.IdJournal as journalNum -- use this to validate if we used it already.
|
||||
,po.Add_User as add_user
|
||||
,po.Add_Date as add_date
|
||||
,po.Upd_User as upd_user
|
||||
,po.Upd_Date as upd_Date
|
||||
,po.Bemerkung as remark
|
||||
,po.IdJournal as journal -- use this to validate if we used it already.
|
||||
,isnull((
|
||||
select
|
||||
o.IdArtikelVarianten as av
|
||||
,a.Bezeichnung as alias
|
||||
,Lieferdatum as deliveryDate
|
||||
,cast(BestellMenge as decimal(18,2)) as qty
|
||||
,cast(BestellMengeVPK as decimal(18,0)) as pkg
|
||||
,cast(PreisProEinheit as decimal(18,0)) as price
|
||||
,PositionsStatus
|
||||
,case PositionsStatus
|
||||
when 1 then 'Created'
|
||||
when 2 then 'Ordered'
|
||||
when 22 then 'Reopened'
|
||||
when 4 then 'Planned'
|
||||
when 5 then 'Partly Delivered'
|
||||
when 6 then 'Delivered'
|
||||
when 7 then 'Canceled'
|
||||
when 8 then 'Closed'
|
||||
else 'Unknown' end as statusText
|
||||
,o.upd_user
|
||||
,o.upd_date
|
||||
from T_Bestellpositionen (nolock) as o
|
||||
|
||||
left join
|
||||
T_Artikelvarianten as a on
|
||||
a.IdArtikelvarianten = o.IdArtikelVarianten
|
||||
where o.IdBestellung = po.IdBestellung
|
||||
for json path
|
||||
), '[]') as position
|
||||
--,*
|
||||
from T_Bestellungen (nolock) as po
|
||||
where po.Upd_Date > dateadd(MINUTE, -@intervalCheck, getdate())
|
||||
74
backend/prodSql/queries/deliveryByDateRange.sql
Normal file
@@ -0,0 +1,74 @@
|
||||
use [test1_AlplaPROD2.0_Read]
|
||||
|
||||
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
|
||||
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
|
||||
SELECT
|
||||
r.[ArticleHumanReadableId]
|
||||
,[ReleaseNumber]
|
||||
,h.CustomerOrderNumber
|
||||
,x.CustomerLineItemNumber
|
||||
,[CustomerReleaseNumber]
|
||||
,[ReleaseState]
|
||||
,[DeliveryState]
|
||||
,ea.JournalNummer as BOL_Number
|
||||
,[ReleaseConfirmationState]
|
||||
,[PlanningState]
|
||||
--,format(r.[OrderDate], 'yyyy-MM-dd HH:mm') as OrderDate
|
||||
,r.[OrderDate]
|
||||
--,FORMAT(r.[DeliveryDate], 'yyyy-MM-dd HH:mm') as DeliveryDate
|
||||
,r.[DeliveryDate]
|
||||
--,FORMAT(r.[LoadingDate], 'yyyy-MM-dd HH:mm') as LoadingDate
|
||||
,r.[LoadingDate]
|
||||
,[Quantity]
|
||||
,[DeliveredQuantity]
|
||||
,r.[AdditionalInformation1]
|
||||
,r.[AdditionalInformation2]
|
||||
,[TradeUnits]
|
||||
,[LoadingUnits]
|
||||
,[Trucks]
|
||||
,[LoadingToleranceType]
|
||||
,[SalesPrice]
|
||||
,[Currency]
|
||||
,[QuantityUnit]
|
||||
,[SalesPriceRemark]
|
||||
,r.[Remark]
|
||||
,[Irradiated]
|
||||
,r.[CreatedByEdi]
|
||||
,[DeliveryAddressHumanReadableId]
|
||||
,DeliveryAddressDescription
|
||||
,[CustomerArtNo]
|
||||
,[TotalPrice]
|
||||
,r.[ArticleAlias]
|
||||
|
||||
FROM [order].[Release] (nolock) as r
|
||||
|
||||
left join
|
||||
[order].LineItem as x on
|
||||
|
||||
r.LineItemId = x.id
|
||||
|
||||
left join
|
||||
[order].Header as h on
|
||||
x.HeaderId = h.id
|
||||
|
||||
--bol stuff
|
||||
left join
|
||||
AlplaPROD_test1.dbo.V_LadePlanungenLadeAuftragAbruf (nolock) as zz
|
||||
on zz.AbrufIdAuftragsAbruf = r.ReleaseNumber
|
||||
|
||||
left join
|
||||
(select * from (SELECT
|
||||
ROW_NUMBER() OVER (PARTITION BY IdJournal ORDER BY add_date DESC) AS RowNum
|
||||
,*
|
||||
FROM [AlplaPROD_test1].[dbo].[T_Lieferungen] (nolock)) x
|
||||
|
||||
where RowNum = 1) as ea on
|
||||
zz.IdLieferschein = ea.IdJournal
|
||||
|
||||
where
|
||||
--r.ArticleHumanReadableId in ([articles])
|
||||
--r.ReleaseNumber = 1452
|
||||
|
||||
r.DeliveryDate between @StartDate AND @EndDate
|
||||
and DeliveredQuantity > 0
|
||||
--and Journalnummer = 169386
|
||||
@@ -1,4 +1,4 @@
|
||||
export const prodSqlServerStats = `
|
||||
|
||||
DECLARE @UptimeSeconds INT;
|
||||
DECLARE @StartTime DATETIME;
|
||||
|
||||
@@ -13,4 +13,4 @@ SELECT
|
||||
(@UptimeSeconds % 86400) / 3600 AS [Hours],
|
||||
(@UptimeSeconds % 3600) / 60 AS [Minutes],
|
||||
(@UptimeSeconds % 60) AS [Seconds];
|
||||
`;
|
||||
|
||||
72
backend/prodSql/queries/releaseChecks.sql
Normal file
@@ -0,0 +1,72 @@
|
||||
SELECT
|
||||
[Id]
|
||||
,[ReleaseNumber]
|
||||
,[CustomerReleaseNumber]
|
||||
,[ReleaseState]
|
||||
,[LineItemId]
|
||||
,[BlanketOrderId]
|
||||
,[DeliveryState]
|
||||
,[ReleaseConfirmationState]
|
||||
,[PlanningState]
|
||||
,[OrderDate]
|
||||
,cast([DeliveryDate] as datetime2) as DeliveryDate
|
||||
,[LoadingDate]
|
||||
,[Quantity]
|
||||
,[DeliveredQuantity]
|
||||
,[DeliveredQuantityTradeUnits]
|
||||
,[DeliveredQuantityLoadingUnits]
|
||||
,[PackagingId]
|
||||
,[PackagingHumanReadableId]
|
||||
,[PackagingDescription]
|
||||
,[MainMaterialId]
|
||||
,[MainMaterialHumanReadableId]
|
||||
,[MainMaterialDescription]
|
||||
,[AdditionalInformation1]
|
||||
,[AdditionalInformation2]
|
||||
,[D365SupplierLot]
|
||||
,[TradeUnits]
|
||||
,[LoadingUnits]
|
||||
,[Trucks]
|
||||
,[LoadingToleranceType]
|
||||
,[UnderdeliveryDeviation]
|
||||
,[OverdeliveryDeviation]
|
||||
,[ArticleAccountRequirements_ArticleExact]
|
||||
,[ArticleAccountRequirements_CustomerExact]
|
||||
,[ArticleAccountRequirements_PackagingExact]
|
||||
,[ArticleAccountRequirements_MainMaterialExact]
|
||||
,[PriceLogicType]
|
||||
,[AllowProductionLotMixing]
|
||||
,[EnforceStrictPicking]
|
||||
,[SalesPrice]
|
||||
,[Currency]
|
||||
,[QuantityUnit]
|
||||
,[SalesPriceRemark]
|
||||
,[DeliveryConditionId]
|
||||
,[DeliveryConditionHumanReadableId]
|
||||
,[DeliveryConditionDescription]
|
||||
,[PaymentTermsId]
|
||||
,[PaymentTermsHumanReadableId]
|
||||
,[PaymentTermsDescription]
|
||||
,[Remark]
|
||||
,[DeliveryAddressId]
|
||||
,[DeliveryAddressHumanReadableId]
|
||||
,[DeliveryAddressDescription]
|
||||
,[DeliveryStreetName]
|
||||
,[DeliveryAddressZip]
|
||||
,[DeliveryCity]
|
||||
,[DeliveryCountry]
|
||||
,[ReleaseDiscount]
|
||||
,[CustomerArtNo]
|
||||
,[LineItemHumanReadableId]
|
||||
,[LineItemArticle]
|
||||
,[LineItemArticleWeight]
|
||||
,[LineItemQuantityType]
|
||||
,[TotalPrice]
|
||||
,[Add_User]
|
||||
,[Add_Date]
|
||||
,[Upd_User]
|
||||
,cast([Upd_Date] as dateTime) as Upd_Date
|
||||
,[VatRate]
|
||||
,[ArticleAlias]
|
||||
FROM [test1_AlplaPROD2.0_Reporting].[reporting_order].[Release] (nolock)
|
||||
where format([Upd_Date], 'yyyy-MM-dd HH:mm:ss') > [dateCheck]
|
||||
28
backend/prodSql/queries/reprintLabels.sql
Normal file
@@ -0,0 +1,28 @@
|
||||
use [test1_AlplaPROD2.0_Read]
|
||||
|
||||
SELECT
|
||||
--JSON_VALUE(content, '$.EntityId') as labelId
|
||||
a.id
|
||||
,ActorName
|
||||
,FORMAT(PrintDate, 'yyyy-MM-dd HH:mm') as printDate
|
||||
,FORMAT(CreatedDateTime, 'yyyy-MM-dd HH:mm') createdDateTime
|
||||
,l.ArticleHumanReadableId as av
|
||||
,l.ArticleDescription as alias
|
||||
,PrintedCopies
|
||||
,p.name as printerName
|
||||
,RunningNumber
|
||||
--,*
|
||||
FROM [support].[AuditLog] (nolock) as a
|
||||
|
||||
left join
|
||||
[labelling].[InternalLabel] (nolock) as l on
|
||||
l.id = JSON_VALUE(content, '$.EntityId')
|
||||
|
||||
left join
|
||||
[masterData].[printer] (nolock) as p on
|
||||
p.id = l.PrinterId
|
||||
|
||||
where message like '%reprint%'
|
||||
and CreatedDateTime > DATEADD(minute, -[intervalCheck], SYSDATETIMEOFFSET())
|
||||
and a.id > [ignoreList]
|
||||
order by CreatedDateTime desc
|
||||
117
backend/purchase/puchase.gpCheck.ts
Normal file
@@ -0,0 +1,117 @@
import { gpQuery } from "../gpSql/gpSqlQuery.controller.js";
import {
  type SqlGPQuery,
  sqlGpQuerySelector,
} from "../gpSql/gpSqlQuerySelector.utils.js";
import { createLogger } from "../logger/logger.controller.js";
import type { GpStatus } from "../types/purhcaseTypes.js";
import { returnFunc } from "../utils/returnHelper.utils.js";

const log = createLogger({ module: "purchase", subModule: "gp" });

export const gpReqCheck = async (data: GpStatus[]) => {
  const gpReqCheck = sqlGpQuerySelector("reqCheck") as SqlGPQuery;
  const reqs = data.map((r) => r.req.trim());

  if (!gpReqCheck.success) {
    return returnFunc({
      success: false,
      level: "error",
      module: "purchase",
      subModule: "query",
      message: `Error getting alpla purchase info`,
      data: [gpReqCheck.message],
      notify: false,
    });
  }

  try {
    // check the initial req table
    const result = await gpQuery(
      gpReqCheck.query.replace(
        "[reqsToCheck]",
        data.map((r) => `'${r.req}'`).join(", "),
      ),
      "Get req info",
    );

    log.debug(
      {},
      `There are ${result.data.length} reqs that need to be updated with their current status`,
    );

    const firstFound = result.data.map((r) => ({
      req: r.req.trim(),
      approvedStatus: r.approvedStatus,
    }));

    const firstFoundSet = new Set(result.data.map((r) => r.req.trim()));

    const missing1Reqs = reqs.filter((req) => !firstFoundSet.has(req));

    // check if we have a recall on our req
    const reqCheck = await gpQuery(
      `select
      [Requisition Number] as req
      ,case when [Workflow Status] = 'recall' then 'returned' else [Workflow Status] end as approvedStatus
      --,*
      from [dbo].[PurchaseRequisitions] where [Requisition Number] in (${missing1Reqs.map((r) => `'${r}'`).join(", ")})`,
      "validate req is not in recall",
    );

    const secondFound = reqCheck.data.map((r) => ({
      req: r.req.trim(),
      approvedStatus: r.approvedStatus,
    }));

    const secondFoundSet = new Set(reqCheck.data.map((r) => r.req.trim()));

    const missing2Reqs = missing1Reqs.filter((req) => !secondFoundSet.has(req));

    // check if we have a po already
    const apoCheck = await gpQuery(
      `select
      SOPNUMBE
      ,PONUMBER
      ,reqStatus='converted'
      ,*
      from alpla.dbo.sop60100 (nolock) where sopnumbe in (${missing2Reqs.map((r) => `'${r}'`).join(", ")})`,
      "Get release info",
    );

    const thirdRound = apoCheck.data.map((r) => ({
      req: r.req.trim(),
      approvedStatus: r.approvedStatus,
    }));

    const thirdFoundSet = new Set(apoCheck.data.map((r) => r.req.trim()));

    const missing3Reqs = missing2Reqs.filter((req) => !thirdFoundSet.has(req));

    // the remaining reqs were just canceled or no longer exist
    const remaining = missing3Reqs.map((m) => ({
      req: m,
      approvedStatus: "canceled",
    }));

    const allFound = [
      ...firstFound,
      ...secondFound,
      ...thirdRound,
      ...remaining,
    ];

    const statusMap = new Map(
      allFound.map((r: any) => [r.req, r.approvedStatus]),
    );

    const updateData = data.map((row) => ({
      id: row.id,
      //req: row.req,
      approvedStatus: statusMap.get(row.req.trim()) ?? null,
    }));

    return updateData;
  } catch (error) {
    log.error({ stack: error });
  }
};
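A usage sketch for the lookup chain above, assuming rows shaped like GpStatus (an id plus the req number pulled from the remark); the values are illustrative:

// hypothetical input rows
const pending = [
  { id: "1", req: "REQ12345" },
  { id: "2", req: "REQ67890" },
];

const statuses = await gpReqCheck(pending);
// -> e.g. [{ id: "1", approvedStatus: "approved" }, { id: "2", approvedStatus: "canceled" }]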
230
backend/purchase/purchase.controller.ts
Normal file
@@ -0,0 +1,230 @@
|
||||
/**
|
||||
* This will monitor alpla purchase
|
||||
*/
|
||||
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../db/db.controller.js";
|
||||
import {
|
||||
alplaPurchaseHistory,
|
||||
type NewAlplaPurchaseHistory,
|
||||
} from "../db/schema/alplapurchase.schema.js";
|
||||
import { settings } from "../db/schema/settings.schema.js";
|
||||
import { createLogger } from "../logger/logger.controller.js";
|
||||
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
|
||||
import {
|
||||
type SqlQuery,
|
||||
sqlQuerySelector,
|
||||
} from "../prodSql/prodSqlQuerySelector.utils.js";
|
||||
import type { GpStatus, StatusUpdate } from "../types/purhcaseTypes.js";
|
||||
import { createCronJob } from "../utils/croner.utils.js";
|
||||
import { delay } from "../utils/delay.utils.js";
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||
import { gpReqCheck } from "./puchase.gpCheck.js";
|
||||
|
||||
const log = createLogger({ module: "purchase", subModule: "purchaseMonitor" });
|
||||
|
||||
export const monitorAlplaPurchase = async () => {
|
||||
const purchaseMonitor = await db
|
||||
.select()
|
||||
.from(settings)
|
||||
.where(eq(settings.name, "purchaseMonitor"));
|
||||
|
||||
const sqlQuery = sqlQuerySelector(`alplapurchase`) as SqlQuery;
|
||||
|
||||
if (!sqlQuery.success) {
|
||||
return returnFunc({
|
||||
success: false,
|
||||
level: "error",
|
||||
module: "purchase",
|
||||
subModule: "query",
|
||||
message: `Error getting alpla purchase info`,
|
||||
data: [sqlQuery.message],
|
||||
notify: false,
|
||||
});
|
||||
}
|
||||
|
||||
if (purchaseMonitor[0]?.active) {
|
||||
createCronJob("purchaseMonitor", "0 */5 * * * *", async () => {
|
||||
try {
|
||||
const result = await prodQuery(
|
||||
sqlQuery.query.replace(
|
||||
"[interval]",
|
||||
`${purchaseMonitor[0]?.value || "5"}`,
|
||||
),
|
||||
"Get release info",
|
||||
);
|
||||
|
||||
log.debug(
|
||||
{},
|
||||
`There are ${result.data.length} pending to be updated from the last ${purchaseMonitor[0]?.value} minutes`,
|
||||
);
|
||||
if (result.data.length) {
|
||||
const convertedData = result.data.map((i) => ({
|
||||
...i,
|
||||
position: JSON.parse(i.position),
|
||||
})) as NewAlplaPurchaseHistory[];
|
||||
|
||||
const { data, error } = await tryCatch(
|
||||
db.insert(alplaPurchaseHistory).values(convertedData).returning(),
|
||||
);
|
||||
|
||||
if (data) {
|
||||
log.debug(
|
||||
{ data },
|
||||
"New data was just added to alpla purchase history",
|
||||
);
|
||||
}
|
||||
|
||||
if (error) {
|
||||
log.error(
|
||||
{ error },
|
||||
"There was an error adding alpla purchase history",
|
||||
);
|
||||
}
|
||||
|
||||
await delay(500);
|
||||
}
|
||||
} catch (e) {
|
||||
log.error({ error: e }, "Error occurred while running the monitor job");
|
||||
log.error({ error: e }, "Error occurred while running the monitor job");
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// re-pull everything whose approvedStatus is pending
|
||||
|
||||
const { data: allReq, error: errorReq } = await tryCatch(
|
||||
db
|
||||
.select()
|
||||
.from(alplaPurchaseHistory)
|
||||
.where(eq(alplaPurchaseHistory.approvedStatus, "new")),
|
||||
);
|
||||
|
||||
// if there are no reqs (or we hit an error) just end now
|
||||
if (errorReq) {
|
||||
log.error(
|
||||
{ stack: errorReq },
|
||||
"There was an error getting history data",
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
log.debug({}, `There are ${allReq.length} pending reqs to be updated`);
|
||||
|
||||
if (!allReq.length) {
|
||||
log.debug({}, "There are not reqs to be processed");
|
||||
return;
|
||||
}
|
||||
/**
|
||||
* approvedStatus
|
||||
* remark = '' then pending req/manual po
|
||||
* pending = pending
|
||||
* approved = approved
|
||||
*
|
||||
*/
|
||||
|
||||
// the flow for all the fun stuff
|
||||
|
||||
const needsGpLookup: GpStatus[] = [];
|
||||
const updates: StatusUpdate[] = [];
|
||||
|
||||
for (const row of allReq ?? []) {
|
||||
const remark = row.remark?.toLowerCase() ?? "";
|
||||
|
||||
if (remark === "") {
|
||||
updates.push({ id: row.id, approvedStatus: "initial" });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (remark.includes("rct")) {
|
||||
updates.push({ id: row.id, approvedStatus: "received" });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (remark.includes("apo")) {
|
||||
updates.push({ id: row.id, approvedStatus: "approved" });
|
||||
continue;
|
||||
}
|
||||
|
||||
// not handled locally, defer to GP lookup
|
||||
needsGpLookup.push({ id: row.id, req: row.remark?.trim() ?? "" });
|
||||
}
|
||||
|
||||
const gpSmash = (await gpReqCheck(needsGpLookup)) as StatusUpdate[];
|
||||
|
||||
const merge = [...updates, ...gpSmash];
|
||||
|
||||
if (merge.length > 0) {
|
||||
await db.execute(sql`
|
||||
UPDATE ${alplaPurchaseHistory}
|
||||
SET approved_status = CASE
|
||||
${sql.join(
|
||||
merge.map(
|
||||
(row) =>
|
||||
sql`WHEN ${alplaPurchaseHistory.id} = ${row.id} THEN ${row.approvedStatus}`,
|
||||
),
|
||||
sql` `,
|
||||
)}
|
||||
ELSE approved_status
|
||||
END,
|
||||
updated_at = NOW()
|
||||
WHERE ${alplaPurchaseHistory.id} IN (
|
||||
${sql.join(
|
||||
merge.map((row) => sql`${row.id}`),
|
||||
sql`, `,
|
||||
)}
|
||||
)
|
||||
`);
|
||||
log.info(
|
||||
{},
|
||||
"All alpla purchase orders have been processed and updated",
|
||||
);
|
||||
}
|
||||
|
||||
// for reqs, build a string of req numbers and run them through the gp req table to see their status, then update in lst as we see fit.
|
||||
|
||||
// then double check if we have all reqs covered, for the reqs missing from above restring them and check the po table
|
||||
|
||||
// these ones will be called to as converted to po
|
||||
|
||||
// for the remaining reqs from above, check the actual req table for status; if the workflow is set to Recall, a change was requested by the purchasing team and the req needs to be re-approved
|
||||
|
||||
// for all remaining reqs we change them to replace/canceled
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// const updates = (allReq ?? [])
|
||||
// .map((row) => {
|
||||
// const remark = row.remark?.toLowerCase() ?? "";
|
||||
|
||||
// let approvedStatus: string | null = null;
|
||||
|
||||
// // priority order matters here
|
||||
// if (remark === "") {
|
||||
// approvedStatus = "initial";
|
||||
// } else if (remark.includes("rct")) {
|
||||
// approvedStatus = "received";
|
||||
// } else if (remark.includes("apo")) {
|
||||
// approvedStatus = "approved";
|
||||
// }
|
||||
|
||||
// // add your next 4 checks here
|
||||
// // else if (...) approvedStatus = "somethingElse";
|
||||
|
||||
// if (!approvedStatus) return null;
|
||||
|
||||
// return {
|
||||
// id: row.id,
|
||||
// approvedStatus,
|
||||
// };
|
||||
// })
|
||||
// .filter(
|
||||
// (
|
||||
// row,
|
||||
// ): row is {
|
||||
// id: string;
|
||||
// approvedStatus: string;
|
||||
// } => row !== null,
|
||||
// );
|
||||
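The bulk write in monitorAlplaPurchase collapses N row updates into a single statement via a CASE expression; roughly, the generated SQL looks like the comment below (the table name and values are illustrative, and drizzle binds the real values as parameters):

// shape of the generated statement for two rows (names/values are illustrative):
//
//   UPDATE alpla_purchase_history
//   SET approved_status = CASE
//         WHEN id = 1 THEN 'approved'
//         WHEN id = 2 THEN 'canceled'
//         ELSE approved_status
//       END,
//       updated_at = NOW()
//   WHERE id IN (1, 2)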
188
backend/rfid/daytonConfig copy.json
Normal file
@@ -0,0 +1,188 @@
|
||||
{
|
||||
"GPIO-LED": {
|
||||
"GPODefaults": {
|
||||
"1": "HIGH",
|
||||
"2": "HIGH",
|
||||
"3": "HIGH",
|
||||
"4": "HIGH"
|
||||
},
|
||||
"LEDDefaults": {
|
||||
"3": "GREEN"
|
||||
},
|
||||
"TAG_READ": [
|
||||
{
|
||||
"pin": 1,
|
||||
"state": "HIGH",
|
||||
"type": "GPO"
|
||||
}
|
||||
]
|
||||
},
|
||||
"READER-GATEWAY": {
|
||||
"batching": [
|
||||
{
|
||||
"maxPayloadSizePerReport": 256000,
|
||||
"reportingInterval": 2000
|
||||
}
|
||||
],
|
||||
"endpointConfig": {
|
||||
"data": {
|
||||
"event": {
|
||||
"connections": [
|
||||
{
|
||||
"additionalOptions": {
|
||||
"batching": {
|
||||
"maxPayloadSizePerReport": 256000,
|
||||
"reportingInterval": 2000
|
||||
},
|
||||
"retention": {
|
||||
"maxEventRetentionTimeInMin": 500,
|
||||
"maxNumEvents": 150000,
|
||||
"throttle": 100
|
||||
}
|
||||
},
|
||||
"description": "",
|
||||
"name": "LST",
|
||||
"options": {
|
||||
"URL": "https://usday1prod.alpla.net/lst/old/api/rfid/taginfo/line3.4",
|
||||
"security": {
|
||||
"CACertificateFileLocation": "",
|
||||
"authenticationOptions": {
|
||||
"privateKeyFileLocation": "/readerconfig/ssl/server.key",
|
||||
"publicKeyFileLocation": "/readerconfig/ssl/server.crt"
|
||||
},
|
||||
"authenticationType": "NONE",
|
||||
"verifyHost": false,
|
||||
"verifyPeer": false
|
||||
}
|
||||
},
|
||||
"type": "httpPost"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"managementEventConfig": {
|
||||
"errors": {
|
||||
"antenna": false,
|
||||
"cpu": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 90
|
||||
},
|
||||
"database": true,
|
||||
"flash": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 90
|
||||
},
|
||||
"ntp": true,
|
||||
"radio": true,
|
||||
"radio_control": true,
|
||||
"ram": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 90
|
||||
},
|
||||
"reader_gateway": true,
|
||||
"userApp": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 120
|
||||
}
|
||||
},
|
||||
"gpiEvents": true,
|
||||
"gpoEvents": true,
|
||||
"heartbeat": {
|
||||
"fields": {
|
||||
"radio_control": [
|
||||
"ANTENNAS",
|
||||
"RADIO_ACTIVITY",
|
||||
"RADIO_CONNECTION",
|
||||
"CPU",
|
||||
"RAM",
|
||||
"UPTIME",
|
||||
"NUM_ERRORS",
|
||||
"NUM_WARNINGS",
|
||||
"NUM_TAG_READS",
|
||||
"NUM_TAG_READS_PER_ANTENNA",
|
||||
"NUM_DATA_MESSAGES_TXED",
|
||||
"NUM_RADIO_PACKETS_RXED"
|
||||
],
|
||||
"reader_gateway": [
|
||||
"NUM_DATA_MESSAGES_RXED",
|
||||
"NUM_MANAGEMENT_EVENTS_TXED",
|
||||
"NUM_DATA_MESSAGES_TXED",
|
||||
"NUM_DATA_MESSAGES_RETAINED",
|
||||
"NUM_DATA_MESSAGES_DROPPED",
|
||||
"CPU",
|
||||
"RAM",
|
||||
"UPTIME",
|
||||
"NUM_ERRORS",
|
||||
"NUM_WARNINGS",
|
||||
"INTERFACE_CONNECTION_STATUS",
|
||||
"NOLOCKQ_DEPTH"
|
||||
],
|
||||
"system": [
|
||||
"CPU",
|
||||
"FLASH",
|
||||
"NTP",
|
||||
"RAM",
|
||||
"SYSTEMTIME",
|
||||
"TEMPERATURE",
|
||||
"UPTIME",
|
||||
"GPO",
|
||||
"GPI",
|
||||
"POWER_NEGOTIATION",
|
||||
"POWER_SOURCE",
|
||||
"MAC_ADDRESS",
|
||||
"HOSTNAME"
|
||||
],
|
||||
"userDefined": null,
|
||||
"userapps": [
|
||||
"STATUS",
|
||||
"CPU",
|
||||
"RAM",
|
||||
"UPTIME",
|
||||
"NUM_DATA_MESSAGES_RXED",
|
||||
"NUM_DATA_MESSAGES_TXED",
|
||||
"INCOMING_DATA_BUFFER_PERCENTAGE_REMAINING",
|
||||
"OUTGOING_DATA_BUFFER_PERCENTAGE_REMAINING"
|
||||
]
|
||||
},
|
||||
"interval": 60
|
||||
},
|
||||
"userappEvents": true,
|
||||
"warnings": {
|
||||
"cpu": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 80
|
||||
},
|
||||
"database": true,
|
||||
"flash": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 80
|
||||
},
|
||||
"ntp": true,
|
||||
"radio_api": true,
|
||||
"radio_control": true,
|
||||
"ram": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 80
|
||||
},
|
||||
"reader_gateway": true,
|
||||
"temperature": {
|
||||
"ambient": 75,
|
||||
"pa": 105
|
||||
},
|
||||
"userApp": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 60
|
||||
}
|
||||
}
|
||||
},
|
||||
"retention": [
|
||||
{
|
||||
"maxEventRetentionTimeInMin": 500,
|
||||
"maxNumEvents": 150000,
|
||||
"throttle": 100
|
||||
}
|
||||
]
|
||||
},
|
||||
"xml": "<?xml version='1.0'?>\n<Motorola xmlns:Falcon='http://www.motorola.com/RFID/Readers/Config/Falcon' xmlns='http://www.motorola.com/RFID/Readers/Config/Falcon'>\n<Config>\n<AppVersion major='3' minor='28' build='1' maintenance='0'/>\n<CommConfig EnabledStacks='IPV4' DisableRAPktProcessing='1' EnableDHCPv6='1' IPv6StaticIPAddr='fe80::1' IPv6SubnetMask='64' IPv6StaticGateway='::' IPv6DNSIP='fe80::20' DHCP='1' IPAddr='10.44.14.39' Mask='255.255.255.0' Gateway='10.44.14.252' DNS='10.44.9.250' DomainSearch='example.com' HttpRunning='2' TelnetActive='2' FtpActive='2' usbMode='0' WatchdogEnabled='1' AvahiEnabled='1' NetBIOSEnabled='0' RDMPAgentEnabled='1' SerialConTimeout='0' SNTP='0.0.0.0' SNTPHostName='pool.ntp.org' sntpHostDisplayMode='0' llrpClientMode='0' llrpSecureMode='0' llrpSecureModeValidatePeer='0' llrpPort='5084' llrpHostIP='192.168.127.2' allowllrpConnOverride='0' shouldReconnect='1'/>\n<Bluetooth discoverable='0' pairable='0' PincodeEnabled='0' passkey='165CB22DA5BE7BBEFB77709DD0A94B03FB77709DD0A94B03FB77709DD0A94B03FB77709DD0A94B03FB77709DD0A94B03FB77709DD0A94B03FB77709DD0A94B03' startIP='192.168.0.2' endIP='192.168.0.3'/>\n<WirelessConfig essid='' autoconnect='0'/>\n<RegionConfig RFCountry='United States/Canada' RFRegulatory='US FCC 15' RFScanMode='0' LBTEnable='0' ChannelData='FFFFFFFFFFFFFFFF'/>\n<SnmpConfig snmpVersion='1' heartbeat='1'/>\n<SyslogConfig RemoteIp='0.0.0.0' RemotePort='514' LogMinSeverity='7' ApplyFilter='0' MinimumSeverity='7' ProcessFilter='rmserver.elf,llrpserver.elf,snmpextagent.elf,RDMPAgent'/>\n<UserList>\n<User name='admin' PSWD='$6$weLpDwlv$utr0AwgPIae2O4Gln4cQ2IJJblXye412Xqni0V.ahIFKUOCEDGjzZ4ttthhrw7rmmQYsCXKwA9znyqPkAT.IL/'/>\n<User name='rfidadm' PSWD='15491'/>\n</UserList>\n<IPReader name='FX96007AF832 FX9600 RFID Reader' desc='FX96007AF832 Advanced Reader' flags='0' MonoStatic='0' CheckAntenna='1' gpiDebounceTime='0' gpioMapping='0' idleModeTimeOut='0' diagMode='0' extDiagMode='0' contact='Zebra Technologies Corporation' PowerNegotiation='0' PowerNegotiationProtocol='0' allowGuestLogin='1' configureHostName='0'>\n<ReadPoint name='Read Point 1' flags='0' CableLossPerHundredFt='10' CableLength='10'/>\n<ReadPoint name='Read Point 2' flags='0' CableLossPerHundredFt='10' CableLength='10'/>\n<ReadPoint name='Read Point 3' flags='1' CableLossPerHundredFt='10' CableLength='10'/>\n<ReadPoint name='Read Point 4' flags='1' CableLossPerHundredFt='10' CableLength='10'/>\n</IPReader>\n<SerialPortConf Mode='0' Baudrate='115200' Databits='8' Parity='none' Stopbits='1' Flowcontrol='hardware' TagMetaData='0' InventoryControl='0' IsAutostart='0'/>\n<FXConnectConfig FXConnectMode='0' TagMetaData='0' InventoryControl='None' HeartBeatPeriod='0' IsAutostart='0' PreFilterMode='0' PreFilters='None'/>\n<ProfinetConfig virtualDAP='1'/>\n<NodeJSPortConf Portnumber='8001'/>\n</Config>\n<MOTOROLA_LLRP_CONFIG><LLRP_READER_CONFIG />\n</MOTOROLA_LLRP_CONFIG>\n<IOT_CONNECT_CONFIG><OPERATING_MODE />\n</IOT_CONNECT_CONFIG>\n<RadioProfileData><RadioRegisterData Address='0' Data='00'/>\n</RadioProfileData>\n<CustomProfileData ForceEAPMode='0' FIPS_MODE_ENABLED='0' MaxNumberOfTagsBuffered='512'/>\n</Motorola >\n"
|
||||
}
|
||||
206
backend/rfid/daytonConfig.json
Normal file
@@ -0,0 +1,206 @@
|
||||
{
|
||||
"GPIO-LED": {
|
||||
"GPODefaults": {
|
||||
"1": "HIGH",
|
||||
"2": "HIGH",
|
||||
"3": "HIGH",
|
||||
"4": "HIGH"
|
||||
},
|
||||
"LEDDefaults": {
|
||||
"3": "GREEN"
|
||||
},
|
||||
"TAG_READ": [
|
||||
{
|
||||
"pin": 1,
|
||||
"state": "HIGH",
|
||||
"type": "GPO"
|
||||
}
|
||||
]
|
||||
},
|
||||
"READER-GATEWAY": {
|
||||
"batching": [
|
||||
{
|
||||
"maxPayloadSizePerReport": 256000,
|
||||
"reportingInterval": 2000
|
||||
}
|
||||
],
|
||||
"endpointConfig": {
|
||||
"data": {
|
||||
"event": {
|
||||
"connections": [
|
||||
{
|
||||
"additionalOptions": {
|
||||
"retention": {
|
||||
"maxEventRetentionTimeInMin": 500,
|
||||
"maxNumEvents": 150000,
|
||||
"throttle": 100
|
||||
}
|
||||
},
|
||||
"description": "",
|
||||
"name": "lst",
|
||||
"options": {
|
||||
"URL": "http://usday1vms006:3100/api/rfid/taginfo/wrapper1",
|
||||
"security": {
|
||||
"CACertificateFileLocation": "",
|
||||
"authenticationOptions": {},
|
||||
"authenticationType": "NONE",
|
||||
"verifyHost": false,
|
||||
"verifyPeer": false
|
||||
}
|
||||
},
|
||||
"type": "httpPost"
|
||||
},
|
||||
{
|
||||
"additionalOptions": {
|
||||
"retention": {
|
||||
"maxEventRetentionTimeInMin": 500,
|
||||
"maxNumEvents": 150000,
|
||||
"throttle": 100
|
||||
}
|
||||
},
|
||||
"description": "",
|
||||
"name": "mgt",
|
||||
"options": {
|
||||
"URL": "http://usday1vms006:3100/api/rfid/mgtevents/wrapper1",
|
||||
"security": {
|
||||
"CACertificateFileLocation": "",
|
||||
"authenticationOptions": {},
|
||||
"authenticationType": "NONE",
|
||||
"verifyHost": false,
|
||||
"verifyPeer": false
|
||||
}
|
||||
},
|
||||
"type": "httpPost"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"interfaces": {
|
||||
"tagDataInterface1": "lst",
|
||||
"managementEventsInterface": "mgt"
|
||||
},
|
||||
"managementEventConfig": {
|
||||
"errors": {
|
||||
"antenna": false,
|
||||
"cpu": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 90
|
||||
},
|
||||
"database": true,
|
||||
"flash": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 90
|
||||
},
|
||||
"ntp": true,
|
||||
"radio": true,
|
||||
"radio_control": true,
|
||||
"ram": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 90
|
||||
},
|
||||
"reader_gateway": true,
|
||||
"userApp": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 120
|
||||
}
|
||||
},
|
||||
"gpiEvents": true,
|
||||
"gpoEvents": true,
|
||||
"heartbeat": {
|
||||
"fields": {
|
||||
"radio_control": [
|
||||
"ANTENNAS",
|
||||
"RADIO_ACTIVITY",
|
||||
"RADIO_CONNECTION",
|
||||
"CPU",
|
||||
"RAM",
|
||||
"UPTIME",
|
||||
"NUM_ERRORS",
|
||||
"NUM_WARNINGS",
|
||||
"NUM_TAG_READS",
|
||||
"NUM_TAG_READS_PER_ANTENNA",
|
||||
"NUM_DATA_MESSAGES_TXED",
|
||||
"NUM_RADIO_PACKETS_RXED"
|
||||
],
|
||||
"reader_gateway": [
|
||||
"NUM_DATA_MESSAGES_RXED",
|
||||
"NUM_MANAGEMENT_EVENTS_TXED",
|
||||
"NUM_DATA_MESSAGES_TXED",
|
||||
"NUM_DATA_MESSAGES_RETAINED",
|
||||
"NUM_DATA_MESSAGES_DROPPED",
|
||||
"CPU",
|
||||
"RAM",
|
||||
"UPTIME",
|
||||
"NUM_ERRORS",
|
||||
"NUM_WARNINGS",
|
||||
"INTERFACE_CONNECTION_STATUS",
|
||||
"NOLOCKQ_DEPTH"
|
||||
],
|
||||
"system": [
|
||||
"CPU",
|
||||
"FLASH",
|
||||
"NTP",
|
||||
"RAM",
|
||||
"SYSTEMTIME",
|
||||
"TEMPERATURE",
|
||||
"UPTIME",
|
||||
"GPO",
|
||||
"GPI",
|
||||
"POWER_NEGOTIATION",
|
||||
"POWER_SOURCE",
|
||||
"MAC_ADDRESS",
|
||||
"HOSTNAME"
|
||||
],
|
||||
"userDefined": null,
|
||||
"userapps": [
|
||||
"STATUS",
|
||||
"CPU",
|
||||
"RAM",
|
||||
"UPTIME",
|
||||
"NUM_DATA_MESSAGES_RXED",
|
||||
"NUM_DATA_MESSAGES_TXED",
|
||||
"INCOMING_DATA_BUFFER_PERCENTAGE_REMAINING",
|
||||
"OUTGOING_DATA_BUFFER_PERCENTAGE_REMAINING"
|
||||
]
|
||||
},
|
||||
"interval": 60
|
||||
},
|
||||
"userappEvents": true,
|
||||
"warnings": {
|
||||
"cpu": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 80
|
||||
},
|
||||
"database": true,
|
||||
"flash": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 80
|
||||
},
|
||||
"ntp": true,
|
||||
"radio_api": true,
|
||||
"radio_control": true,
|
||||
"ram": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 80
|
||||
},
|
||||
"reader_gateway": true,
|
||||
"temperature": {
|
||||
"ambient": 75,
|
||||
"pa": 105
|
||||
},
|
||||
"userApp": {
|
||||
"reportIntervalInSec": 1800,
|
||||
"threshold": 60
|
||||
}
|
||||
}
|
||||
},
|
||||
"retention": [
|
||||
{
|
||||
"maxEventRetentionTimeInMin": 500,
|
||||
"maxNumEvents": 150000,
|
||||
"throttle": 100
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
27
backend/routeHandler.routes.ts
Normal file
@@ -0,0 +1,27 @@
import type { Express } from "express";

// import the routes and route setups
import { setupAuthRoutes } from "./auth/auth.routes.js";
import { setupApiDocsRoutes } from "./configs/scaler.config.js";
import { setupDatamartRoutes } from "./datamart/datamart.routes.js";
import { setupGPSqlRoutes } from "./gpSql/gpSql.routes.js";
import { setupNotificationRoutes } from "./notification/notification.routes.js";
import { setupOCPRoutes } from "./ocp/ocp.routes.js";
import { setupOpendockRoutes } from "./opendock/opendock.routes.js";
import { setupProdSqlRoutes } from "./prodSql/prodSql.routes.js";
import { setupSystemRoutes } from "./system/system.routes.js";
import { setupUtilsRoutes } from "./utils/utils.routes.js";

export const setupRoutes = (baseUrl: string, app: Express) => {
  // routes that are on by default
  setupSystemRoutes(baseUrl, app);
  setupApiDocsRoutes(baseUrl, app);
  setupProdSqlRoutes(baseUrl, app);
  setupGPSqlRoutes(baseUrl, app);
  setupDatamartRoutes(baseUrl, app);
  setupAuthRoutes(baseUrl, app);
  setupUtilsRoutes(baseUrl, app);
  setupOpendockRoutes(baseUrl, app);
  setupNotificationRoutes(baseUrl, app);
  setupOCPRoutes(baseUrl, app);
};
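A sketch of how setupRoutes might be wired up by a caller; the "/api" base URL is an assumption taken from the route paths in the specs below, and in this repo the wiring presumably happens inside createApp:

import express from "express";
import { setupRoutes } from "./routeHandler.routes.js";

const app = express();
app.use(express.json());
// registers the system, docs, sql, datamart, auth, utils, opendock, notification and ocp routes
setupRoutes("/api", app);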
43
backend/scaler/cronerActiveJobs.spec.ts
Normal file
@@ -0,0 +1,43 @@
import type { OpenAPIV3_1 } from "openapi-types";

export const cronerActiveJobs: OpenAPIV3_1.PathsObject = {
  "/api/utils/croner": {
    get: {
      summary: "Cron jobs",
      description: "Returns all cron jobs on the server.",
      tags: ["Utils"],
      responses: {
        "200": {
          description: "Jobs returned",
          content: {
            "application/json": {
              schema: {
                type: "object",
                properties: {
                  status: {
                    type: "boolean",
                    example: true,
                  },
                  uptime: {
                    type: "number",
                    format: "float",
                    example: 3454.34,
                  },
                  memoryUsage: {
                    type: "string",
                    example: "Heap: 11.62 MB / RSS: 86.31 MB",
                  },
                  sqlServerStats: {
                    type: "number",
                    example: 442127,
                  },
                },
              },
            },
          },
        },
      },
    },
  },
};
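Each *.spec.ts file exports an OpenAPIV3_1.PathsObject fragment keyed by its path. A sketch of how the fragments might be merged into one document (the title and version here are assumptions; the repo's actual assembly lives in configs/scaler.config.js, which is not shown in this diff):

import type { OpenAPIV3_1 } from "openapi-types";
import { cronerActiveJobs } from "./cronerActiveJobs.spec.js";

const apiDoc: OpenAPIV3_1.Document = {
  openapi: "3.1.0",
  info: { title: "LST API", version: "0.0.1" }, // hypothetical metadata
  paths: {
    ...cronerActiveJobs, // each spec fragment contributes its own path keys
    // ...spread the other *.spec.ts fragments here
  },
};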
94
backend/scaler/cronerStatusChange.spec.ts
Normal file
@@ -0,0 +1,94 @@
import type { OpenAPIV3_1 } from "openapi-types";

export const cronerStatusChange: OpenAPIV3_1.PathsObject = {
  "/api/utils/croner/{status}": {
    patch: {
      summary: "Pause or resume a job",
      description:
        "Sending start or stop with a job name resumes or stops that job",
      tags: ["Utils"],

      parameters: [
        {
          name: "status",
          in: "path",
          required: true,
          description: "Status change",
          schema: {
            type: "string",
          },
          example: "start",
        },
        {
          name: "limit",
          in: "query",
          required: false, // optional
          description: "Maximum number of records to return",
          schema: {
            type: "integer",
            minimum: 1,
            maximum: 100,
          },
          example: 10,
        },
      ],
      requestBody: {
        required: true,
        content: {
          "application/json": {
            schema: {
              type: "object",
              required: ["name"],
              properties: {
                name: {
                  type: "string",
                  example: "logsCleanup",
                },
              },
            },
          },
        },
      },

      responses: {
        "200": {
          description: "Successful response",
          content: {
            "application/json": {
              schema: {
                type: "object",
                properties: {
                  success: { type: "boolean", example: true },
                  data: {
                    type: "object",
                    example: {
                      name: "exampleName",
                      value: "some value",
                    },
                  },
                },
              },
            },
          },
        },
        "400": {
          description: "Bad request",
          content: {
            "application/json": {
              schema: {
                type: "object",
                properties: {
                  success: { type: "boolean", example: false },
                  message: {
                    type: "string",
                    example: "Invalid name parameter",
                  },
                },
              },
            },
          },
        },
      },
    },
  },
};
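A hypothetical client call matching the spec above: the status goes in the path, the job name in the body. The host and job name are assumptions (the job name is borrowed from the cron jobs registered later in this diff):

const res = await fetch("http://localhost:3000/api/utils/croner/start?limit=10", {
  method: "PATCH",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ name: "logsCleanup" }), // required "name" field per the spec
});
console.log(await res.json()); // { success: true, data: { ... } } per the 200 schema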
@@ -31,6 +31,12 @@ export const datamartAddSpec: OpenAPIV3_1.PathsObject = {
            description:
              "Optional comma separated options string passed to the query",
          },
          publicAccess: {
            type: "boolean",
            example: true,
            description: "Whether this query will be accessible from the frontends",
          },
          queryFile: {
            type: "string",
            format: "binary",
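The binary queryFile alongside scalar fields implies a multipart/form-data upload. A sketch of a matching client request; the endpoint path and file contents are assumptions, since the hunk above does not show them:

const form = new FormData();
form.append("publicAccess", "true"); // multipart fields travel as strings on the wire
form.append("options", "opt1,opt2"); // optional comma separated options
form.append(
  "queryFile",
  new Blob(["SELECT 1;"], { type: "application/sql" }),
  "report.sql",
);
await fetch("http://localhost:3000/api/datamart/add", { method: "POST", body: form });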
36
backend/scaler/opendockGetRelease.spec.ts
Normal file
@@ -0,0 +1,36 @@
import type { OpenAPIV3_1 } from "openapi-types";

export const openDockApt: OpenAPIV3_1.PathsObject = {
  "/api/opendock": {
    get: {
      summary: "Open Dock apts",
      description: "Returns the last 30 days of apt(s).",
      tags: ["Open Dock"],
      responses: {
        "200": {
          description: "Apt(s) returned",
          content: {
            "application/json": {
              schema: {
                type: "object",
                properties: {
                  success: {
                    type: "boolean",
                    example: true,
                  },
                  message: {
                    type: "string",
                    example:
                      "The first 5 Apt(s) that were created in the last 30 days",
                  },
                },
              },
            },
          },
        },
      },
    },
  },
};
@@ -3,6 +3,7 @@ import type { OpenAPIV3_1 } from "openapi-types";
export const prodRestartSpec: OpenAPIV3_1.PathsObject = {
  "/api/system/prodSql/restart": {
    post: {
      // security: [],
      summary: "Restart prod SQL connection",
      description: "Attempts to restart the SQL connection.",
      tags: ["System"],
@@ -1,20 +1,70 @@
+import { createServer } from "node:http";
 import os from "node:os";
 import createApp from "./app.js";
-import { createLogger } from "./src/logger/logger.controller.js";
-import { connectProdSql } from "./src/prodSql/prodSqlConnection.controller.js";
+import { db } from "./db/db.controller.js";
+import { dbCleanup } from "./db/dbCleanup.controller.js";
+import { type Setting, settings } from "./db/schema/settings.schema.js";
+import { connectGPSql } from "./gpSql/gpSqlConnection.controller.js";
+import { createLogger } from "./logger/logger.controller.js";
+import { startNotifications } from "./notification/notification.controller.js";
+import { createNotifications } from "./notification/notifications.master.js";
+import { monitorReleaseChanges } from "./opendock/openDockRreleaseMonitor.utils.js";
+import { opendockSocketMonitor } from "./opendock/opendockSocketMonitor.utils.js";
+import { connectProdSql } from "./prodSql/prodSqlConnection.controller.js";
+import { monitorAlplaPurchase } from "./purchase/purchase.controller.js";
+import { setupSocketIORoutes } from "./socket.io/serverSetup.js";
+import { baseSettingValidationCheck } from "./system/settingsBase.controller.js";
+import { createCronJob } from "./utils/croner.utils.js";
 
 const port = Number(process.env.PORT) || 3000;
 
+export let systemSettings: Setting[] = [];
 const start = async () => {
-  const log = createLogger({ module: "system", subModule: "main start" });
-
-  connectProdSql();
-
   const { app, baseUrl } = await createApp();
 
-  app.listen(port, async () => {
+  const server = createServer(app);
+
+  setupSocketIORoutes(baseUrl, server);
+
+  const log = createLogger({ module: "system", subModule: "main start" });
+
+  // trigger long-lived processes
+  connectProdSql();
+  connectGPSql();
+
+  // trigger startup processes; these must run before anything else
+  await baseSettingValidationCheck();
+  systemSettings = await db.select().from(settings);
+
+  // when starting up long-lived features the name must match the setting name;
+  // we always want long-lived processes gated behind a setting check.
+  setTimeout(() => {
+    if (systemSettings.filter((n) => n.name === "opendock_sync")[0]?.active) {
+      log.info({}, "Opendock is active");
+      monitorReleaseChanges(); // opendock: monitors the db for all new releases
+      opendockSocketMonitor();
+      createCronJob("opendockAptCleanup", "0 30 5 * * *", () =>
+        dbCleanup("opendockApt", 90),
+      );
+    }
+
+    if (systemSettings.filter((n) => n.name === "purchaseMonitor")[0]?.active) {
+      monitorAlplaPurchase();
+    }
+
+    // the jobs below are system jobs and should run no matter what.
+    createCronJob("JobAuditLogCleanUp", "0 0 5 * * *", () =>
+      dbCleanup("jobs", 30),
+    );
+    createCronJob("logsCleanup", "0 15 5 * * *", () => dbCleanup("logs", 120));
+
+    // one-shots that only need to run on server startup
+    createNotifications();
+    startNotifications();
+  }, 5 * 1000);
+
+  server.listen(port, async () => {
     log.info(
-      `Listening on http://${os.hostname()}:${port}${baseUrl}, logging in ${process.env.LOG_LEVEL}`,
+      `Listening on http://${os.hostname()}:${port}${baseUrl}, logging in ${process.env.LOG_LEVEL}, current ENV ${process.env.NODE_ENV ? process.env.NODE_ENV : "development"}`,
     );
   });
 };
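createCronJob itself is not shown in this diff; a minimal sketch of what utils/croner.utils.js might wrap, assuming the croner package (the 6-field patterns above include a seconds field, which croner supports):

import { Cron } from "croner";

// hypothetical wrapper: a named croner job, discoverable later by name
export const createCronJob = (name: string, pattern: string, fn: () => void) =>
  new Cron(pattern, { name }, fn);

// usage, as in the startup code above:
// createCronJob("logsCleanup", "0 15 5 * * *", () => dbCleanup("logs", 120));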
Some files were not shown because too many files have changed in this diff