Compare commits
37 Commits
9ca24a266a
...
v0.0.1-alp
| Author | SHA1 | Date | |
|---|---|---|---|
| 82eaa23da7 | |||
| b18d1ced6d | |||
| 69c5cf87fd | |||
| 1fadf0ad25 | |||
| beae6eb648 | |||
| 82ab735982 | |||
| dbd56c1b50 | |||
| 037a473ab7 | |||
| 32998d417f | |||
| ddcb7e76a3 | |||
| 191cb2b698 | |||
| 2021141967 | |||
| 751c8f21ab | |||
| 85073c19d2 | |||
| 6b8d7b53d0 | |||
| e025d0f5cc | |||
| e67e9e6d72 | |||
| 2846b9cb0d | |||
| 5db2a7fe75 | |||
| 81dc575b4f | |||
| bf7d765989 | |||
| 4f24fe4660 | |||
| 68d13b03d3 | |||
| c3379919b9 | |||
| 326c2e125c | |||
| 880902c478 | |||
| 100c9ff9be | |||
| a8af021621 | |||
| 5469a0dc5c | |||
| 2d1f613d39 | |||
| 597d990a69 | |||
| 76503f558b | |||
| 23c000fa7f | |||
| 31f8c368d9 | |||
| 81bd4d6dcb | |||
| 152f7042c9 | |||
| ba4635a7a7 |
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"$schema": "https://unpkg.com/@changesets/config@3.1.2/schema.json",
|
"$schema": "https://unpkg.com/@changesets/config/schema.json",
|
||||||
"changelog": "@changesets/cli/changelog",
|
"changelog": "@changesets/cli/changelog",
|
||||||
"commit": false,
|
"commit": false,
|
||||||
"fixed": [],
|
"fixed": [],
|
||||||
|
|||||||
5
.changeset/neat-years-unite.md
Normal file
5
.changeset/neat-years-unite.md
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
---
|
||||||
|
"lst_v3": patch
|
||||||
|
---
|
||||||
|
|
||||||
|
build stuff
|
||||||
11
.changeset/pre.json
Normal file
11
.changeset/pre.json
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"mode": "pre",
|
||||||
|
"tag": "alpha",
|
||||||
|
"initialVersions": {
|
||||||
|
"lst_v3": "1.0.1"
|
||||||
|
},
|
||||||
|
"changesets": [
|
||||||
|
"neat-years-unite",
|
||||||
|
"soft-onions-appear"
|
||||||
|
]
|
||||||
|
}
|
||||||
5
.changeset/soft-onions-appear.md
Normal file
5
.changeset/soft-onions-appear.md
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
---
|
||||||
|
"lst_v3": patch
|
||||||
|
---
|
||||||
|
|
||||||
|
external url added for docker
|
||||||
@@ -4,4 +4,9 @@ node_modules
|
|||||||
dist
|
dist
|
||||||
Dockerfile
|
Dockerfile
|
||||||
docker-compose.yml
|
docker-compose.yml
|
||||||
npm-debug.log
|
npm-debug.log
|
||||||
|
builds
|
||||||
|
testFiles
|
||||||
|
nssm.exe
|
||||||
|
postgresql-17.9-2-windows-x64.exe
|
||||||
|
VSCodeUserSetup-x64-1.112.0.msi
|
||||||
31
.gitea/workflows/docker-build.yml
Normal file
31
.gitea/workflows/docker-build.yml
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
name: Build and Push LST Docker Image
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
docker:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout (local)
|
||||||
|
run: |
|
||||||
|
git clone https://git.tuffraid.net/cowch/lst_v3.git .
|
||||||
|
git checkout ${{ gitea.sha }}
|
||||||
|
|
||||||
|
- name: Login to registry
|
||||||
|
run: echo "${{ secrets.PASSWORD }}" | docker login git.tuffraid.net -u "cowch" --password-stdin
|
||||||
|
|
||||||
|
- name: Build image
|
||||||
|
run: |
|
||||||
|
docker build \
|
||||||
|
-t git.tuffraid.net/cowch/lst_v3:latest \
|
||||||
|
-t git.tuffraid.net/cowch/lst_v3:${{ gitea.sha }} \
|
||||||
|
.
|
||||||
|
|
||||||
|
- name: Push
|
||||||
|
run: |
|
||||||
|
docker push git.tuffraid.net/cowch/lst_v3:latest
|
||||||
|
docker push git.tuffraid.net/cowch/lst_v3:${{ gitea.sha }}
|
||||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -1,5 +1,15 @@
|
|||||||
# ---> Node
|
# ---> Node
|
||||||
testFiles
|
testFiles
|
||||||
|
builds
|
||||||
|
.includes
|
||||||
|
.buildNumber
|
||||||
|
temp
|
||||||
|
.scriptCreds
|
||||||
|
node-v24.14.0-x64.msi
|
||||||
|
postgresql-17.9-2-windows-x64.exe
|
||||||
|
VSCodeUserSetup-x64-1.112.0.exe
|
||||||
|
nssm.exe
|
||||||
|
|
||||||
# Logs
|
# Logs
|
||||||
logs
|
logs
|
||||||
*.log
|
*.log
|
||||||
|
|||||||
1
.vscode/lst.code-snippets
vendored
1
.vscode/lst.code-snippets
vendored
@@ -10,6 +10,7 @@
|
|||||||
"\tmessage: \"${5:Failed to connect to the prod sql server.}\",",
|
"\tmessage: \"${5:Failed to connect to the prod sql server.}\",",
|
||||||
"\tdata: ${6:[]},",
|
"\tdata: ${6:[]},",
|
||||||
"\tnotify: ${7:false},",
|
"\tnotify: ${7:false},",
|
||||||
|
"\troom: ${8:''},",
|
||||||
"});"
|
"});"
|
||||||
],
|
],
|
||||||
"description": "Insert a returnFunc template"
|
"description": "Insert a returnFunc template"
|
||||||
|
|||||||
13
.vscode/settings.json
vendored
13
.vscode/settings.json
vendored
@@ -3,6 +3,8 @@
|
|||||||
"workbench.colorTheme": "Default Dark+",
|
"workbench.colorTheme": "Default Dark+",
|
||||||
"terminal.integrated.env.windows": {},
|
"terminal.integrated.env.windows": {},
|
||||||
"editor.formatOnSave": true,
|
"editor.formatOnSave": true,
|
||||||
|
"typescript.preferences.importModuleSpecifier": "relative",
|
||||||
|
"javascript.preferences.importModuleSpecifier": "relative",
|
||||||
"editor.codeActionsOnSave": {
|
"editor.codeActionsOnSave": {
|
||||||
"source.fixAll.biome": "explicit",
|
"source.fixAll.biome": "explicit",
|
||||||
"source.organizeImports.biome": "explicit"
|
"source.organizeImports.biome": "explicit"
|
||||||
@@ -52,12 +54,21 @@
|
|||||||
"alpla",
|
"alpla",
|
||||||
"alplamart",
|
"alplamart",
|
||||||
"alplaprod",
|
"alplaprod",
|
||||||
|
"bookin",
|
||||||
"Datamart",
|
"Datamart",
|
||||||
|
"dyco",
|
||||||
"intiallally",
|
"intiallally",
|
||||||
|
"manadatory",
|
||||||
"OCME",
|
"OCME",
|
||||||
"onnotice",
|
"onnotice",
|
||||||
|
"opendock",
|
||||||
|
"opendocks",
|
||||||
"ppoo",
|
"ppoo",
|
||||||
"prodlabels"
|
"preseed",
|
||||||
|
"prodlabels",
|
||||||
|
"prolink",
|
||||||
|
"Skelly",
|
||||||
|
"trycatch"
|
||||||
],
|
],
|
||||||
"gitea.token": "8456def90e1c651a761a8711763d6ef225d6b2db",
|
"gitea.token": "8456def90e1c651a761a8711763d6ef225d6b2db",
|
||||||
"gitea.instanceURL": "https://git.tuffraid.net",
|
"gitea.instanceURL": "https://git.tuffraid.net",
|
||||||
|
|||||||
@@ -1,5 +1,12 @@
|
|||||||
# lst_v3
|
# lst_v3
|
||||||
|
|
||||||
|
## 1.0.2-alpha.0
|
||||||
|
|
||||||
|
### Patch Changes
|
||||||
|
|
||||||
|
- build stuff
|
||||||
|
- external url added for docker
|
||||||
|
|
||||||
## 1.0.1
|
## 1.0.1
|
||||||
|
|
||||||
### Patch Changes
|
### Patch Changes
|
||||||
|
|||||||
10
Dockerfile
10
Dockerfile
@@ -9,10 +9,13 @@ WORKDIR /app
|
|||||||
# Copy package files
|
# Copy package files
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
# Install production dependencies only
|
# build backend
|
||||||
RUN npm ci
|
RUN npm ci
|
||||||
|
RUN npm run build:docker
|
||||||
|
|
||||||
RUN npm run build
|
# build frontend
|
||||||
|
RUN npm --prefix frontend ci
|
||||||
|
RUN npm --prefix frontend run build
|
||||||
|
|
||||||
###########
|
###########
|
||||||
# Stage 2 #
|
# Stage 2 #
|
||||||
@@ -33,6 +36,9 @@ RUN npm ci --omit=dev
|
|||||||
|
|
||||||
|
|
||||||
COPY --from=build /app/dist ./dist
|
COPY --from=build /app/dist ./dist
|
||||||
|
COPY --from=build /app/frontend/dist ./frontend/dist
|
||||||
|
|
||||||
|
# TODO add in drizzle migrates
|
||||||
|
|
||||||
ENV RUNNING_IN_DOCKER=true
|
ENV RUNNING_IN_DOCKER=true
|
||||||
EXPOSE 3000
|
EXPOSE 3000
|
||||||
|
|||||||
@@ -1,10 +1,12 @@
|
|||||||
|
import { dirname, join } from "node:path";
|
||||||
|
import { fileURLToPath } from "node:url";
|
||||||
import { toNodeHandler } from "better-auth/node";
|
import { toNodeHandler } from "better-auth/node";
|
||||||
import express from "express";
|
import express from "express";
|
||||||
import morgan from "morgan";
|
import morgan from "morgan";
|
||||||
import { createLogger } from "./src/logger/logger.controller.js";
|
import { createLogger } from "./logger/logger.controller.js";
|
||||||
import { setupRoutes } from "./src/routeHandler.routes.js";
|
import { setupRoutes } from "./routeHandler.routes.js";
|
||||||
import { auth } from "./src/utils/auth.utils.js";
|
import { auth } from "./utils/auth.utils.js";
|
||||||
import { lstCors } from "./src/utils/cors.utils.js";
|
import { lstCors } from "./utils/cors.utils.js";
|
||||||
|
|
||||||
const createApp = async () => {
|
const createApp = async () => {
|
||||||
const log = createLogger({ module: "system", subModule: "main start" });
|
const log = createLogger({ module: "system", subModule: "main start" });
|
||||||
@@ -20,15 +22,34 @@ const createApp = async () => {
|
|||||||
baseUrl = "/lst";
|
baseUrl = "/lst";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
const __dirname = dirname(__filename);
|
||||||
|
|
||||||
// well leave this active so we can monitor it to validate
|
// well leave this active so we can monitor it to validate
|
||||||
app.use(morgan("tiny"));
|
app.use(morgan("tiny"));
|
||||||
app.set("trust proxy", true);
|
app.set("trust proxy", true);
|
||||||
app.all(`${baseUrl}api/auth/*splat`, toNodeHandler(auth));
|
|
||||||
app.use(express.json());
|
|
||||||
app.use(lstCors());
|
app.use(lstCors());
|
||||||
|
app.all(`${baseUrl}/api/auth/*splat`, toNodeHandler(auth));
|
||||||
|
app.use(express.json());
|
||||||
setupRoutes(baseUrl, app);
|
setupRoutes(baseUrl, app);
|
||||||
|
|
||||||
log.info("Express app created");
|
app.use(
|
||||||
|
baseUrl + "/app",
|
||||||
|
express.static(join(__dirname, "../frontend/dist")),
|
||||||
|
);
|
||||||
|
|
||||||
|
app.get(baseUrl + "/app/*splat", (_, res) => {
|
||||||
|
res.sendFile(join(__dirname, "../frontend/dist/index.html"));
|
||||||
|
});
|
||||||
|
|
||||||
|
app.all("*foo", (_, res) => {
|
||||||
|
res.status(400).json({
|
||||||
|
message:
|
||||||
|
"You have encountered a route that dose not exist, please check the url and try again",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
log.info("Lst app created");
|
||||||
return { app, baseUrl };
|
return { app, baseUrl };
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -54,7 +54,8 @@ const signin = z.union([
|
|||||||
const r = Router();
|
const r = Router();
|
||||||
|
|
||||||
r.post("/", async (req, res) => {
|
r.post("/", async (req, res) => {
|
||||||
let login: unknown;
|
let login: unknown | any;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const validated = signin.parse(req.body);
|
const validated = signin.parse(req.body);
|
||||||
if ("email" in validated) {
|
if ("email" in validated) {
|
||||||
@@ -92,6 +93,27 @@ r.post("/", async (req, res) => {
|
|||||||
password: validated.password,
|
password: validated.password,
|
||||||
},
|
},
|
||||||
headers: fromNodeHeaders(req.headers),
|
headers: fromNodeHeaders(req.headers),
|
||||||
|
asResponse: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (login.status === 401) {
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error", //connect.success ? "info" : "error",
|
||||||
|
module: "routes",
|
||||||
|
subModule: "auth",
|
||||||
|
message: `Incorrect username or password please try again`,
|
||||||
|
data: [],
|
||||||
|
status: 401, //connect.success ? 200 : 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
login.headers.forEach((value: string, key: string) => {
|
||||||
|
if (key.toLowerCase() === "set-cookie") {
|
||||||
|
res.append("set-cookie", value);
|
||||||
|
} else {
|
||||||
|
res.setHeader(key, value);
|
||||||
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import { APIError } from "better-auth";
|
import { APIError } from "better-auth";
|
||||||
import { count, sql } from "drizzle-orm";
|
import { count, eq, sql } from "drizzle-orm";
|
||||||
import { Router } from "express";
|
import { Router } from "express";
|
||||||
import z from "zod";
|
import z from "zod";
|
||||||
import { db } from "../db/db.controller.js";
|
import { db } from "../db/db.controller.js";
|
||||||
@@ -58,7 +58,10 @@ r.post("/", async (req, res) => {
|
|||||||
// if we have no users yet lets make this new one the admin
|
// if we have no users yet lets make this new one the admin
|
||||||
if (userCount === 0) {
|
if (userCount === 0) {
|
||||||
// make this user an admin
|
// make this user an admin
|
||||||
await db.update(user).set({ role: "admin", updatedAt: sql`NOW()` });
|
await db
|
||||||
|
.update(user)
|
||||||
|
.set({ role: "admin", updatedAt: sql`NOW()` })
|
||||||
|
.where(eq(user.id, newUser.user.id));
|
||||||
}
|
}
|
||||||
|
|
||||||
apiReturn(res, {
|
apiReturn(res, {
|
||||||
@@ -78,7 +81,7 @@ r.post("/", async (req, res) => {
|
|||||||
// details: flattened,
|
// details: flattened,
|
||||||
// });
|
// });
|
||||||
|
|
||||||
apiReturn(res, {
|
return apiReturn(res, {
|
||||||
success: false,
|
success: false,
|
||||||
level: "error", //connect.success ? "info" : "error",
|
level: "error", //connect.success ? "info" : "error",
|
||||||
module: "routes",
|
module: "routes",
|
||||||
@@ -90,7 +93,7 @@ r.post("/", async (req, res) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (err instanceof APIError) {
|
if (err instanceof APIError) {
|
||||||
apiReturn(res, {
|
return apiReturn(res, {
|
||||||
success: false,
|
success: false,
|
||||||
level: "error", //connect.success ? "info" : "error",
|
level: "error", //connect.success ? "info" : "error",
|
||||||
module: "routes",
|
module: "routes",
|
||||||
@@ -101,7 +104,7 @@ r.post("/", async (req, res) => {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
apiReturn(res, {
|
return apiReturn(res, {
|
||||||
success: false,
|
success: false,
|
||||||
level: "error", //connect.success ? "info" : "error",
|
level: "error", //connect.success ? "info" : "error",
|
||||||
module: "routes",
|
module: "routes",
|
||||||
@@ -5,13 +5,14 @@ import type { Express } from "express";
|
|||||||
//const __filename = fileURLToPath(import.meta.url);
|
//const __filename = fileURLToPath(import.meta.url);
|
||||||
// const __dirname = path.dirname(__filename);
|
// const __dirname = path.dirname(__filename);
|
||||||
|
|
||||||
|
import os from "node:os";
|
||||||
import { apiReference } from "@scalar/express-api-reference";
|
import { apiReference } from "@scalar/express-api-reference";
|
||||||
// const port = 3000;
|
// const port = 3000;
|
||||||
import type { OpenAPIV3_1 } from "openapi-types";
|
import type { OpenAPIV3_1 } from "openapi-types";
|
||||||
import { datamartAddSpec } from "../scaler/datamartAdd.spec.js";
|
import { cronerActiveJobs } from "../scaler/cronerActiveJobs.spec.js";
|
||||||
import { datamartUpdateSpec } from "../scaler/datamartUpdate.spec.js";
|
import { cronerStatusChange } from "../scaler/cronerStatusChange.spec.js";
|
||||||
import { getDatamartSpec } from "../scaler/getDatamart.spec.js";
|
|
||||||
import { prodLoginSpec } from "../scaler/login.spec.js";
|
import { prodLoginSpec } from "../scaler/login.spec.js";
|
||||||
|
import { openDockApt } from "../scaler/opendockGetRelease.spec.js";
|
||||||
import { prodRestartSpec } from "../scaler/prodSqlRestart.spec.js";
|
import { prodRestartSpec } from "../scaler/prodSqlRestart.spec.js";
|
||||||
import { prodStartSpec } from "../scaler/prodSqlStart.spec.js";
|
import { prodStartSpec } from "../scaler/prodSqlStart.spec.js";
|
||||||
import { prodStopSpec } from "../scaler/prodSqlStop.spec.js";
|
import { prodStopSpec } from "../scaler/prodSqlStop.spec.js";
|
||||||
@@ -28,10 +29,12 @@ export const openApiBase: OpenAPIV3_1.Document = {
|
|||||||
},
|
},
|
||||||
servers: [
|
servers: [
|
||||||
{
|
{
|
||||||
url: `http://localhost:3000${process.env.NODE_ENV?.trim() !== "production" ? "/lst" : "/"}`,
|
// TODO: change this to the https:// if we are in production and port if not.
|
||||||
|
url: `http://${os.hostname()}:3000${process.env.NODE_ENV?.trim() !== "production" ? "/lst" : "/"}`,
|
||||||
description: "Development server",
|
description: "Development server",
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
|
||||||
components: {
|
components: {
|
||||||
securitySchemes: {
|
securitySchemes: {
|
||||||
bearerAuth: {
|
bearerAuth: {
|
||||||
@@ -50,6 +53,11 @@ export const openApiBase: OpenAPIV3_1.Document = {
|
|||||||
scheme: "basic",
|
scheme: "basic",
|
||||||
description: "Basic authentication using username and password",
|
description: "Basic authentication using username and password",
|
||||||
},
|
},
|
||||||
|
cookieAuth: {
|
||||||
|
type: "apiKey",
|
||||||
|
in: "cookie",
|
||||||
|
name: "better-auth.session_token",
|
||||||
|
},
|
||||||
},
|
},
|
||||||
// schemas: {
|
// schemas: {
|
||||||
// Error: {
|
// Error: {
|
||||||
@@ -61,7 +69,12 @@ export const openApiBase: OpenAPIV3_1.Document = {
|
|||||||
// },
|
// },
|
||||||
// },.
|
// },.
|
||||||
},
|
},
|
||||||
|
// security: [
|
||||||
|
// {
|
||||||
|
// cookieAuth: [],
|
||||||
|
// basicAuth: [],
|
||||||
|
// },
|
||||||
|
// ],
|
||||||
tags: [
|
tags: [
|
||||||
{
|
{
|
||||||
name: "Auth",
|
name: "Auth",
|
||||||
@@ -73,9 +86,12 @@ export const openApiBase: OpenAPIV3_1.Document = {
|
|||||||
description: "All system endpoints that will be available to run",
|
description: "All system endpoints that will be available to run",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "Datamart",
|
name: "Utils",
|
||||||
description:
|
description: "All routes related to the utilities on the server",
|
||||||
"All Special queries to run based on there names.\n Refer to the docs to see all possible queries that can be ran here, you can also run the getQueries to see available.",
|
},
|
||||||
|
{
|
||||||
|
name: "Open Dock",
|
||||||
|
description: "All routes related to the opendock on the server",
|
||||||
},
|
},
|
||||||
// { name: "TMS", description: "TMS integration" },
|
// { name: "TMS", description: "TMS integration" },
|
||||||
],
|
],
|
||||||
@@ -83,14 +99,21 @@ export const openApiBase: OpenAPIV3_1.Document = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
export const setupApiDocsRoutes = (baseUrl: string, app: Express) => {
|
export const setupApiDocsRoutes = (baseUrl: string, app: Express) => {
|
||||||
const mergedDatamart = {
|
// const mergedDatamart = {
|
||||||
"/api/datamart": {
|
// "/api/datamart": {
|
||||||
...(getDatamartSpec["/api/datamart"] ?? {}),
|
// ...(cronerActiveJobs["/api/datamart"] ?? {}),
|
||||||
...(datamartAddSpec["/api/datamart"] ?? {}),
|
// ...(datamartAddSpec["/api/datamart"] ?? {}),
|
||||||
...(datamartUpdateSpec["/api/datamart"] ?? {}),
|
// ...(datamartUpdateSpec["/api/datamart"] ?? {}),
|
||||||
},
|
// },
|
||||||
"/api/datamart/{name}": getDatamartSpec["/api/datamart/{name}"],
|
// "/api/datamart/{name}": getDatamartSpec["/api/datamart/{name}"],
|
||||||
};
|
// };
|
||||||
|
|
||||||
|
// const mergeUtils = {
|
||||||
|
// "/api/utils/croner": {
|
||||||
|
// ...(cronerActiveJobs["/api/utils/croner"] ?? {}),
|
||||||
|
// },
|
||||||
|
// "/api/utils/{name}": cronerActiveJobs["/api/utils/{name}"],
|
||||||
|
// };
|
||||||
|
|
||||||
const fullSpec = {
|
const fullSpec = {
|
||||||
...openApiBase,
|
...openApiBase,
|
||||||
@@ -101,7 +124,10 @@ export const setupApiDocsRoutes = (baseUrl: string, app: Express) => {
|
|||||||
...prodRestartSpec,
|
...prodRestartSpec,
|
||||||
...prodLoginSpec,
|
...prodLoginSpec,
|
||||||
...prodRegisterSpec,
|
...prodRegisterSpec,
|
||||||
...mergedDatamart,
|
//...mergedDatamart,
|
||||||
|
...cronerActiveJobs,
|
||||||
|
...cronerStatusChange,
|
||||||
|
...openDockApt,
|
||||||
|
|
||||||
// Add more specs here as you build features
|
// Add more specs here as you build features
|
||||||
},
|
},
|
||||||
@@ -115,7 +141,9 @@ export const setupApiDocsRoutes = (baseUrl: string, app: Express) => {
|
|||||||
apiReference({
|
apiReference({
|
||||||
url: `${baseUrl}/api/docs.json`,
|
url: `${baseUrl}/api/docs.json`,
|
||||||
theme: "purple",
|
theme: "purple",
|
||||||
|
|
||||||
darkMode: true,
|
darkMode: true,
|
||||||
|
persistAuth: true,
|
||||||
authentication: {
|
authentication: {
|
||||||
securitySchemes: {
|
securitySchemes: {
|
||||||
httpBasic: {
|
httpBasic: {
|
||||||
@@ -137,7 +165,7 @@ export const setupApiDocsRoutes = (baseUrl: string, app: Express) => {
|
|||||||
// Clojure
|
// Clojure
|
||||||
clojure: ["clj_http"],
|
clojure: ["clj_http"],
|
||||||
// C#
|
// C#
|
||||||
csharp: ["httpclient", "restsharp"],
|
// csharp: ["httpclient", "restsharp"],
|
||||||
// Dart
|
// Dart
|
||||||
dart: ["http"],
|
dart: ["http"],
|
||||||
// F#
|
// F#
|
||||||
@@ -13,49 +13,72 @@
|
|||||||
*
|
*
|
||||||
* when a criteria is password over we will handle it by counting how many were passed up to 3 then deal with each one respectively
|
* when a criteria is password over we will handle it by counting how many were passed up to 3 then deal with each one respectively
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { eq } from "drizzle-orm";
|
|
||||||
import { db } from "../db/db.controller.js";
|
|
||||||
import { datamart } from "../db/schema/datamart.schema.js";
|
|
||||||
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
|
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
|
||||||
|
import {
|
||||||
|
type SqlQuery,
|
||||||
|
sqlQuerySelector,
|
||||||
|
} from "../prodSql/prodSqlQuerySelector.utils.js";
|
||||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
import { datamartData } from "./datamartData.utlis.js";
|
||||||
|
|
||||||
|
type Options = {
|
||||||
|
name: string;
|
||||||
|
value: string;
|
||||||
|
};
|
||||||
type Data = {
|
type Data = {
|
||||||
name: string;
|
name: string;
|
||||||
options: string;
|
options: Options;
|
||||||
|
optionsRequired?: boolean;
|
||||||
|
howManyOptionsRequired?: number;
|
||||||
};
|
};
|
||||||
|
|
||||||
export const runDatamartQuery = async (data: Data) => {
|
export const runDatamartQuery = async (data: Data) => {
|
||||||
// search the query db for the query by name
|
// search the query db for the query by name
|
||||||
const { data: queryInfo, error: qIe } = await tryCatch(
|
const sqlQuery = sqlQuerySelector(`${data.name}`) as SqlQuery;
|
||||||
db.select().from(datamart).where(eq(datamart.name, data.name)),
|
|
||||||
);
|
|
||||||
|
|
||||||
if (qIe) {
|
const getDataMartInfo = datamartData.filter((x) => x.endpoint === data.name);
|
||||||
|
|
||||||
|
// const optionsMissing =
|
||||||
|
// !data.options || Object.keys(data.options).length === 0;
|
||||||
|
|
||||||
|
const optionCount =
|
||||||
|
Object.keys(data.options).length ===
|
||||||
|
getDataMartInfo[0]?.howManyOptionsRequired;
|
||||||
|
|
||||||
|
if (getDataMartInfo[0]?.optionsRequired && !optionCount) {
|
||||||
|
return returnFunc({
|
||||||
|
success: false,
|
||||||
|
level: "error",
|
||||||
|
module: "datamart",
|
||||||
|
subModule: "query",
|
||||||
|
message: `This query is required to have the ${getDataMartInfo[0]?.howManyOptionsRequired} options set in order use it.`,
|
||||||
|
data: [getDataMartInfo[0].options],
|
||||||
|
notify: false,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!sqlQuery.success) {
|
||||||
return returnFunc({
|
return returnFunc({
|
||||||
success: false,
|
success: false,
|
||||||
level: "error",
|
level: "error",
|
||||||
module: "datamart",
|
module: "datamart",
|
||||||
subModule: "query",
|
subModule: "query",
|
||||||
message: `Error getting ${data.name} info`,
|
message: `Error getting ${data.name} info`,
|
||||||
data: [qIe],
|
data: [sqlQuery.message],
|
||||||
notify: false,
|
notify: false,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// create the query with no changed just to have it here
|
// create the query with no changed just to have it here
|
||||||
let datamartQuery = queryInfo[0]?.query || "";
|
let datamartQuery = sqlQuery?.query || "";
|
||||||
|
|
||||||
// split the criteria by "," then and then update the query
|
// split the criteria by "," then and then update the query
|
||||||
if (data.options !== "") {
|
if (data.options) {
|
||||||
const params = new URLSearchParams(data.options);
|
Object.entries(data.options ?? {}).forEach(([key, value]) => {
|
||||||
|
const pattern = new RegExp(`\\[${key.trim()}\\]`, "g");
|
||||||
for (const [rawKey, rawValue] of params.entries()) {
|
datamartQuery = datamartQuery.replace(pattern, String(value).trim());
|
||||||
const key = rawKey.trim();
|
});
|
||||||
const value = rawValue.trim();
|
|
||||||
datamartQuery = datamartQuery.replaceAll(`[${key}]`, value);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const { data: queryRun, error } = await tryCatch(
|
const { data: queryRun, error } = await tryCatch(
|
||||||
60
backend/datamart/datamart.routes.ts
Normal file
60
backend/datamart/datamart.routes.ts
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
import type { Express } from "express";
|
||||||
|
|
||||||
|
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||||
|
import { datamartData } from "./datamartData.utlis.js";
|
||||||
|
import runQuery from "./getDatamart.route.js";
|
||||||
|
|
||||||
|
export const setupDatamartRoutes = (baseUrl: string, app: Express) => {
|
||||||
|
// the sync callback.
|
||||||
|
// app.get(`${baseUrl}/api/datamart/sync`, async (req, res) => {
|
||||||
|
// const { time } = req.query;
|
||||||
|
// const now = new Date();
|
||||||
|
|
||||||
|
// const minutes = parseInt(time as string, 10) || 15;
|
||||||
|
// const cutoff = new Date(now.getTime() - minutes * 60 * 1000);
|
||||||
|
|
||||||
|
// const results = await db
|
||||||
|
// .select()
|
||||||
|
// .from(datamart)
|
||||||
|
// .where(time ? gte(datamart.upd_date, cutoff) : sql`true`);
|
||||||
|
|
||||||
|
// return apiReturn(res, {
|
||||||
|
// success: true,
|
||||||
|
// level: "info",
|
||||||
|
// module: "datamart",
|
||||||
|
// subModule: "query",
|
||||||
|
// message: `All Queries older than ${parseInt(process.env.QUERY_CHECK?.trim() || "15", 10)}min `,
|
||||||
|
// data: results,
|
||||||
|
// status: 200,
|
||||||
|
// });
|
||||||
|
// });
|
||||||
|
|
||||||
|
//setup all the routes
|
||||||
|
|
||||||
|
app.use(`${baseUrl}/api/datamart`, runQuery);
|
||||||
|
|
||||||
|
// just sending a get on datamart will return all the queries that we can call.
|
||||||
|
app.get(`${baseUrl}/api/datamart`, async (_, res) => {
|
||||||
|
// const queries = await db
|
||||||
|
// .select({
|
||||||
|
// id: datamart.id,
|
||||||
|
// name: datamart.name,
|
||||||
|
// description: datamart.description,
|
||||||
|
// options: datamart.options,
|
||||||
|
// version: datamart.version,
|
||||||
|
// upd_date: datamart.upd_date,
|
||||||
|
// })
|
||||||
|
// .from(datamart)
|
||||||
|
// .where(and(eq(datamart.active, true), eq(datamart.public, true)));
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: true,
|
||||||
|
level: "info",
|
||||||
|
module: "datamart",
|
||||||
|
subModule: "query",
|
||||||
|
message: "All active queries we can run",
|
||||||
|
data: datamartData,
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
};
|
||||||
24
backend/datamart/datamartData.utlis.ts
Normal file
24
backend/datamart/datamartData.utlis.ts
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
/**
|
||||||
|
* will store and maintain all queries for datamart here.
|
||||||
|
* this way they can all be easily maintained and updated as we progress with the changes and updates to v3
|
||||||
|
*
|
||||||
|
* for options when putting them into the docs we will show examples on how to pull this
|
||||||
|
*/
|
||||||
|
|
||||||
|
export const datamartData = [
|
||||||
|
{
|
||||||
|
name: "Active articles",
|
||||||
|
endpoint: "activeArticles",
|
||||||
|
description: "returns all active articles for the server with custom data",
|
||||||
|
options: "", // set as a string and each item will be seperated by a , this way we can split it later in the excel file.
|
||||||
|
optionsRequired: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Delivery by date range",
|
||||||
|
endpoint: "deliveryByDateRange",
|
||||||
|
description: `Returns all Deliverys in selected date range IE: 1/1/${new Date(Date.now()).getFullYear()} to 1/31/${new Date(Date.now()).getFullYear()}`,
|
||||||
|
options: "startDate,endDate", // set as a string and each item will be seperated by a , this way we can split it later in the excel file.
|
||||||
|
optionsRequired: true,
|
||||||
|
howManyOptionsRequired: 2,
|
||||||
|
},
|
||||||
|
];
|
||||||
@@ -4,11 +4,14 @@ import { runDatamartQuery } from "./datamart.controller.js";
|
|||||||
|
|
||||||
const r = Router();
|
const r = Router();
|
||||||
|
|
||||||
|
type Options = {
|
||||||
|
name: string;
|
||||||
|
value: string;
|
||||||
|
};
|
||||||
|
|
||||||
r.get("/:name", async (req, res) => {
|
r.get("/:name", async (req, res) => {
|
||||||
const { name } = req.params;
|
const { name } = req.params;
|
||||||
const options = new URLSearchParams(
|
const options = req.query as Options;
|
||||||
req.query as Record<string, string>,
|
|
||||||
).toString();
|
|
||||||
|
|
||||||
const dataRan = await runDatamartQuery({ name, options });
|
const dataRan = await runDatamartQuery({ name, options });
|
||||||
return apiReturn(res, {
|
return apiReturn(res, {
|
||||||
72
backend/db/dbCleanup.controller.ts
Normal file
72
backend/db/dbCleanup.controller.ts
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
import { createLogger } from "../logger/logger.controller.js";
|
||||||
|
import { delay } from "../utils/delay.utils.js";
|
||||||
|
import { db } from "./db.controller.js";
|
||||||
|
|
||||||
|
type DBCount = {
|
||||||
|
count: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
const tableMap = {
|
||||||
|
logs: "logs",
|
||||||
|
jobs: "job_audit_log",
|
||||||
|
opendockApt: "opendock_apt",
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
type CleanupTable = keyof typeof tableMap;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* We will clean up the db based on age.
|
||||||
|
* @param name database to run the cleanup on
|
||||||
|
* @param daysToKeep optional default will be 90 days
|
||||||
|
*/
|
||||||
|
export const dbCleanup = async (name: CleanupTable, daysToKeep?: number) => {
|
||||||
|
const log = createLogger({ module: "db", subModule: "cleanup" });
|
||||||
|
// TODO: send backup of this to another server, via post or something maybe have to reduce the limit but well tackle that later.
|
||||||
|
|
||||||
|
if (!daysToKeep) {
|
||||||
|
daysToKeep = 90;
|
||||||
|
}
|
||||||
|
const limit = 1000;
|
||||||
|
const delayTime = 250;
|
||||||
|
let rowsDeleted: number;
|
||||||
|
|
||||||
|
const dbCount = (await db.execute(
|
||||||
|
`select count(*) from public.${tableMap[name]} WHERE created_at < NOW() - INTERVAL '${daysToKeep} days'`,
|
||||||
|
)) as DBCount[];
|
||||||
|
|
||||||
|
const loopCount = Math.ceil(
|
||||||
|
parseInt(dbCount[0]?.count ?? `${limit}`, 10) / limit,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (parseInt(dbCount[0]?.count ?? `${limit}`, 10) > 1) {
|
||||||
|
log.info(
|
||||||
|
`Table clean up for: ${name}, that are older than ${daysToKeep} day, will be removed, There is ${loopCount} loops to be completed, Approx time: ${((loopCount * delayTime) / 1000 / 60).toFixed(2)} min(s).`,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
log.info(`Table clean up for: ${name}, Currently has nothing to clean up.`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
do {
|
||||||
|
// cleanup logs
|
||||||
|
const deleted = await db.execute(`
|
||||||
|
DELETE FROM public.${tableMap[name]}
|
||||||
|
WHERE id IN (
|
||||||
|
SELECT id
|
||||||
|
FROM public.${tableMap[name]}
|
||||||
|
WHERE created_at < NOW() - INTERVAL '${daysToKeep} days'
|
||||||
|
ORDER BY created_at
|
||||||
|
LIMIT ${limit}
|
||||||
|
)
|
||||||
|
RETURNING id;
|
||||||
|
`);
|
||||||
|
|
||||||
|
rowsDeleted = deleted.length;
|
||||||
|
|
||||||
|
if (rowsDeleted > 0) {
|
||||||
|
await delay(delayTime);
|
||||||
|
}
|
||||||
|
} while (rowsDeleted === limit);
|
||||||
|
|
||||||
|
log.info(`Table clean up for: ${name}, Has completed.`);
|
||||||
|
};
|
||||||
41
backend/db/schema/auditLog.schema.ts
Normal file
41
backend/db/schema/auditLog.schema.ts
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import {
|
||||||
|
index,
|
||||||
|
integer,
|
||||||
|
jsonb,
|
||||||
|
pgTable,
|
||||||
|
text,
|
||||||
|
timestamp,
|
||||||
|
uuid,
|
||||||
|
} from "drizzle-orm/pg-core";
|
||||||
|
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
|
||||||
|
import type { z } from "zod";
|
||||||
|
|
||||||
|
export const jobAuditLog = pgTable(
|
||||||
|
"job_audit_log",
|
||||||
|
{
|
||||||
|
id: uuid("id").defaultRandom().primaryKey(),
|
||||||
|
jobName: text("job_name"),
|
||||||
|
startedAt: timestamp("start_at"),
|
||||||
|
finishedAt: timestamp("finished_at"),
|
||||||
|
durationMs: integer("duration_ms"),
|
||||||
|
status: text("status"), //success | error
|
||||||
|
errorMessage: text("error_message"),
|
||||||
|
errorStack: text("error_stack"),
|
||||||
|
metadata: jsonb("meta_data"),
|
||||||
|
createdAt: timestamp("created_at").defaultNow(),
|
||||||
|
},
|
||||||
|
(table) => {
|
||||||
|
return {
|
||||||
|
cleanupIdx: index("idx_job_audit_logs_cleanup").on(
|
||||||
|
table.startedAt,
|
||||||
|
table.id,
|
||||||
|
),
|
||||||
|
};
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const jobAuditLogSchema = createSelectSchema(jobAuditLog);
|
||||||
|
export const newJobAuditLogSchema = createInsertSchema(jobAuditLog);
|
||||||
|
|
||||||
|
export type JobAuditLog = z.infer<typeof jobAuditLogSchema>;
|
||||||
|
export type NewJobAuditLog = z.infer<typeof newJobAuditLogSchema>;
|
||||||
@@ -17,6 +17,7 @@ export const datamart = pgTable("datamart", {
|
|||||||
version: integer("version").default(1).notNull(),
|
version: integer("version").default(1).notNull(),
|
||||||
active: boolean("active").default(true),
|
active: boolean("active").default(true),
|
||||||
options: text("options").default(""),
|
options: text("options").default(""),
|
||||||
|
public: boolean("public_access").default(false),
|
||||||
add_date: timestamp("add_date").defaultNow(),
|
add_date: timestamp("add_date").defaultNow(),
|
||||||
add_user: text("add_user").default("lst-system"),
|
add_user: text("add_user").default("lst-system"),
|
||||||
upd_date: timestamp("upd_date").defaultNow(),
|
upd_date: timestamp("upd_date").defaultNow(),
|
||||||
29
backend/db/schema/notifications.schema.ts
Normal file
29
backend/db/schema/notifications.schema.ts
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
import {
|
||||||
|
boolean,
|
||||||
|
jsonb,
|
||||||
|
pgTable,
|
||||||
|
text,
|
||||||
|
uniqueIndex,
|
||||||
|
uuid,
|
||||||
|
} from "drizzle-orm/pg-core";
|
||||||
|
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
|
||||||
|
import type z from "zod";
|
||||||
|
|
||||||
|
export const notifications = pgTable(
|
||||||
|
"notifications",
|
||||||
|
{
|
||||||
|
id: uuid("id").defaultRandom().primaryKey(),
|
||||||
|
name: text("name").notNull(),
|
||||||
|
description: text("description").notNull(),
|
||||||
|
active: boolean("active").default(false),
|
||||||
|
interval: text("interval").default("5"),
|
||||||
|
options: jsonb("options").default([]),
|
||||||
|
},
|
||||||
|
(table) => [uniqueIndex("notify_name").on(table.name)],
|
||||||
|
);
|
||||||
|
|
||||||
|
export const notificationSchema = createSelectSchema(notifications);
|
||||||
|
export const newNotificationSchema = createInsertSchema(notifications);
|
||||||
|
|
||||||
|
export type Notification = z.infer<typeof notificationSchema>;
|
||||||
|
export type NewNotification = z.infer<typeof newNotificationSchema>;
|
||||||
30
backend/db/schema/notifications.sub.schema.ts
Normal file
30
backend/db/schema/notifications.sub.schema.ts
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
import { pgTable, text, unique, uuid } from "drizzle-orm/pg-core";
|
||||||
|
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
|
||||||
|
import type z from "zod";
|
||||||
|
import { user } from "./auth.schema.js";
|
||||||
|
import { notifications } from "./notifications.schema.js";
|
||||||
|
|
||||||
|
export const notificationSub = pgTable(
|
||||||
|
"notification_sub",
|
||||||
|
{
|
||||||
|
id: uuid("id").defaultRandom().primaryKey(),
|
||||||
|
userId: text("user_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => user.id, { onDelete: "cascade" }),
|
||||||
|
notificationId: uuid("notification_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => notifications.id, { onDelete: "cascade" }),
|
||||||
|
emails: text("emails").array().default([]),
|
||||||
|
},
|
||||||
|
(table) => ({
|
||||||
|
userNotificationUnique: unique(
|
||||||
|
"notification_sub_user_notification_unique",
|
||||||
|
).on(table.userId, table.notificationId),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
export const notificationSubSchema = createSelectSchema(notificationSub);
|
||||||
|
export const newNotificationSubSchema = createInsertSchema(notificationSub);
|
||||||
|
|
||||||
|
export type NotificationSub = z.infer<typeof notificationSubSchema>;
|
||||||
|
export type NewNotificationSub = z.infer<typeof newNotificationSubSchema>;
|
||||||
25
backend/db/schema/opendock.schema.ts
Normal file
25
backend/db/schema/opendock.schema.ts
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
import {
|
||||||
|
integer,
|
||||||
|
jsonb,
|
||||||
|
pgTable,
|
||||||
|
text,
|
||||||
|
timestamp,
|
||||||
|
uuid,
|
||||||
|
} from "drizzle-orm/pg-core";
|
||||||
|
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
|
||||||
|
import type { z } from "zod";
|
||||||
|
|
||||||
|
export const opendockApt = pgTable("opendock_apt", {
|
||||||
|
id: uuid("id").defaultRandom().primaryKey(),
|
||||||
|
release: integer("release").unique(),
|
||||||
|
openDockAptId: text("open_dock_apt_id").notNull(),
|
||||||
|
appointment: jsonb("appointment").default([]),
|
||||||
|
upd_date: timestamp("upd_date").defaultNow(),
|
||||||
|
createdAt: timestamp("created_at").defaultNow(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const opendockAptSchema = createSelectSchema(opendockApt);
|
||||||
|
export const newOpendockAptSchema = createInsertSchema(opendockApt);
|
||||||
|
|
||||||
|
export type OpendockApt = z.infer<typeof opendockAptSchema>;
|
||||||
|
export type NewOpendockApt = z.infer<typeof newOpendockAptSchema>;
|
||||||
6
backend/db/schema/printerLogs.schema.ts
Normal file
6
backend/db/schema/printerLogs.schema.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
import { integer, pgTable, text } from "drizzle-orm/pg-core";
|
||||||
|
|
||||||
|
export const opendockApt = pgTable("printer_log", {
|
||||||
|
id: integer().primaryKey().generatedAlwaysAsIdentity(),
|
||||||
|
name: text("name").notNull(),
|
||||||
|
});
|
||||||
53
backend/db/schema/settings.schema.ts
Normal file
53
backend/db/schema/settings.schema.ts
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
import {
|
||||||
|
boolean,
|
||||||
|
integer,
|
||||||
|
jsonb,
|
||||||
|
pgEnum,
|
||||||
|
pgTable,
|
||||||
|
text,
|
||||||
|
timestamp,
|
||||||
|
uniqueIndex,
|
||||||
|
uuid,
|
||||||
|
} from "drizzle-orm/pg-core";
|
||||||
|
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
|
||||||
|
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
export const settingType = pgEnum("setting_type", [
|
||||||
|
"feature", // when changed deals with triggering the croner related to this
|
||||||
|
"system", // when changed fires a system restart but this should be rare and all these settings should be in the env
|
||||||
|
"standard", // will be effected by the next process, either croner or manual trigger
|
||||||
|
]);
|
||||||
|
|
||||||
|
export const settings = pgTable(
|
||||||
|
"settings",
|
||||||
|
{
|
||||||
|
id: uuid("settings_id").defaultRandom().primaryKey(),
|
||||||
|
name: text("name").notNull(),
|
||||||
|
value: text("value").notNull(), // this is used in junction with active, only needed if the setting isn't a bool
|
||||||
|
description: text("description"),
|
||||||
|
moduleName: text("moduleName"), // what part of lst dose it belong to this is used to split the settings out later
|
||||||
|
active: boolean("active").default(true),
|
||||||
|
roles: jsonb("roles").$type<string[]>().notNull().default(["systemAdmin"]), // role or roles to see this goes along with the moduleName, need to have a x role in module to see this setting.
|
||||||
|
settingType: settingType(),
|
||||||
|
seedVersion: integer("seed_version").default(1), // this is intended for if we want to update the settings.
|
||||||
|
add_User: text("add_User").default("LST_System").notNull(),
|
||||||
|
add_Date: timestamp("add_Date").defaultNow(),
|
||||||
|
upd_user: text("upd_User").default("LST_System").notNull(),
|
||||||
|
upd_date: timestamp("upd_date").defaultNow(),
|
||||||
|
},
|
||||||
|
(table) => [
|
||||||
|
// uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
|
||||||
|
uniqueIndex("name").on(table.name),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
export const settingSchema = createSelectSchema(settings);
|
||||||
|
export const newSettingSchema = createInsertSchema(settings, {
|
||||||
|
name: z.string().min(3, {
|
||||||
|
message: "The name of the setting must be longer than 3 letters",
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type Setting = z.infer<typeof settingSchema>;
|
||||||
|
export type NewSetting = z.infer<typeof newSettingSchema>;
|
||||||
10
backend/db/schema/stats.schema.ts
Normal file
10
backend/db/schema/stats.schema.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import type { InferSelectModel } from "drizzle-orm";
|
||||||
|
import { integer, pgTable, text, timestamp } from "drizzle-orm/pg-core";
|
||||||
|
|
||||||
|
export const serverStats = pgTable("stats", {
|
||||||
|
id: text("id").primaryKey().default("serverStats"),
|
||||||
|
build: integer("build").notNull().default(1),
|
||||||
|
lastUpdate: timestamp("last_update").defaultNow(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type ServerStats = InferSelectModel<typeof serverStats>;
|
||||||
@@ -1,5 +1,4 @@
|
|||||||
import build from "pino-abstract-transport";
|
import build from "pino-abstract-transport";
|
||||||
|
|
||||||
import { db } from "../db/db.controller.js";
|
import { db } from "../db/db.controller.js";
|
||||||
import { logs } from "../db/schema/logs.schema.js";
|
import { logs } from "../db/schema/logs.schema.js";
|
||||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
90
backend/logger/logger.controller.ts
Normal file
90
backend/logger/logger.controller.ts
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
import { Writable } from "node:stream";
|
||||||
|
|
||||||
|
import pino, { type Logger } from "pino";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { logs } from "../db/schema/logs.schema.js";
|
||||||
|
import { emitToRoom } from "../socket.io/roomEmitter.socket.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
//import build from "pino-abstract-transport";
|
||||||
|
|
||||||
|
export const logLevel = process.env.LOG_LEVEL || "info";
|
||||||
|
|
||||||
|
const pinoLogLevels: Record<number, string> = {
|
||||||
|
10: "trace",
|
||||||
|
20: "debug",
|
||||||
|
30: "info",
|
||||||
|
40: "warn",
|
||||||
|
50: "error",
|
||||||
|
60: "fatal",
|
||||||
|
};
|
||||||
|
|
||||||
|
// ✅ Custom DB writable stream
|
||||||
|
const dbStream = new Writable({
|
||||||
|
objectMode: true,
|
||||||
|
async write(chunk, _enc, callback) {
|
||||||
|
try {
|
||||||
|
const obj = JSON.parse(chunk.toString());
|
||||||
|
|
||||||
|
const levelName = pinoLogLevels[obj.level] || "unknown";
|
||||||
|
|
||||||
|
const res = await tryCatch(
|
||||||
|
db
|
||||||
|
.insert(logs)
|
||||||
|
.values({
|
||||||
|
level: levelName,
|
||||||
|
module: obj?.module?.toLowerCase(),
|
||||||
|
subModule: obj?.subModule?.toLowerCase(),
|
||||||
|
hostname: obj?.hostname?.toLowerCase(),
|
||||||
|
message: obj.msg,
|
||||||
|
stack: obj?.stack,
|
||||||
|
})
|
||||||
|
.returning(),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (res.error) {
|
||||||
|
console.error(res.error);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (obj.room) {
|
||||||
|
emitToRoom(obj.room, res.data ? res.data[0] : obj);
|
||||||
|
}
|
||||||
|
emitToRoom("logs", res.data ? res.data[0] : obj);
|
||||||
|
callback();
|
||||||
|
} catch (err) {
|
||||||
|
console.error("DB log insert error:", err);
|
||||||
|
callback();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const rootLogger: Logger = pino(
|
||||||
|
{
|
||||||
|
level: logLevel,
|
||||||
|
redact: { paths: ["email", "password"], remove: true },
|
||||||
|
},
|
||||||
|
pino.multistream([
|
||||||
|
{
|
||||||
|
level: logLevel,
|
||||||
|
stream: pino.transport({
|
||||||
|
target: "pino-pretty",
|
||||||
|
options: {
|
||||||
|
colorize: true,
|
||||||
|
singleLine: true,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
level: logLevel,
|
||||||
|
stream: dbStream,
|
||||||
|
},
|
||||||
|
]),
|
||||||
|
);
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
*
|
||||||
|
* example data to put in as a reference
|
||||||
|
* rooms logs | labels | etc
|
||||||
|
*/
|
||||||
|
export const createLogger = (bindings: Record<string, unknown>): Logger => {
|
||||||
|
return rootLogger.child(bindings);
|
||||||
|
};
|
||||||
58
backend/middleware/auth.middleware.ts
Normal file
58
backend/middleware/auth.middleware.ts
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
import { fromNodeHeaders } from "better-auth/node";
|
||||||
|
import type { NextFunction, Request, Response } from "express";
|
||||||
|
import { auth } from "../utils/auth.utils.js";
|
||||||
|
|
||||||
|
declare global {
|
||||||
|
namespace Express {
|
||||||
|
interface Request {
|
||||||
|
user?: {
|
||||||
|
id: string;
|
||||||
|
email?: string;
|
||||||
|
roles?: string | null | undefined; //Record<string, string[]>;
|
||||||
|
username?: string | null | undefined;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// function toWebHeaders(nodeHeaders: Request["headers"]): Headers {
|
||||||
|
// const h = new Headers();
|
||||||
|
// for (const [key, value] of Object.entries(nodeHeaders)) {
|
||||||
|
// if (Array.isArray(value)) {
|
||||||
|
// value.forEach((v) => h.append(key, v));
|
||||||
|
// } else if (value !== undefined) {
|
||||||
|
// h.set(key, value);
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// return h;
|
||||||
|
// }
|
||||||
|
|
||||||
|
export const requireAuth = async (
|
||||||
|
req: Request,
|
||||||
|
res: Response,
|
||||||
|
next: NextFunction,
|
||||||
|
) => {
|
||||||
|
try {
|
||||||
|
const session = await auth.api.getSession({
|
||||||
|
headers: fromNodeHeaders(req.headers),
|
||||||
|
//query: { disableCookieCache: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!session) {
|
||||||
|
return res.status(401).json({ error: "Unauthorized" });
|
||||||
|
}
|
||||||
|
|
||||||
|
//console.log(session);
|
||||||
|
|
||||||
|
req.user = {
|
||||||
|
id: session.user.id,
|
||||||
|
email: session.user.email,
|
||||||
|
roles: session.user.role,
|
||||||
|
username: session.user.username,
|
||||||
|
};
|
||||||
|
|
||||||
|
next();
|
||||||
|
} catch {
|
||||||
|
return res.status(401).json({ error: "Unauthorized" });
|
||||||
|
}
|
||||||
|
};
|
||||||
52
backend/middleware/auth.requiredPerms.middleware.ts
Normal file
52
backend/middleware/auth.requiredPerms.middleware.ts
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
import type { NextFunction, Request, Response } from "express";
|
||||||
|
import { auth } from "../utils/auth.utils.js";
|
||||||
|
|
||||||
|
type PermissionMap = Record<string, string[]>;
|
||||||
|
|
||||||
|
declare global {
|
||||||
|
namespace Express {
|
||||||
|
interface Request {
|
||||||
|
authz?: {
|
||||||
|
success: boolean;
|
||||||
|
permissions: PermissionMap;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeRoles(roles: unknown): string {
|
||||||
|
if (Array.isArray(roles)) return roles.join(",");
|
||||||
|
if (typeof roles === "string") return roles;
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
|
||||||
|
export function requirePermission(permissions: PermissionMap) {
|
||||||
|
return async (req: Request, res: Response, next: NextFunction) => {
|
||||||
|
try {
|
||||||
|
const role = normalizeRoles(req.user?.roles) as any;
|
||||||
|
|
||||||
|
const result = await auth.api.userHasPermission({
|
||||||
|
body: {
|
||||||
|
role,
|
||||||
|
permissions,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
req.authz = {
|
||||||
|
success: !!result?.success,
|
||||||
|
permissions,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!result?.success) {
|
||||||
|
return res.status(403).json({
|
||||||
|
ok: false,
|
||||||
|
message: "You do not have permission to perform this action.",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
next();
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
37
backend/middleware/featureActive.middleware.ts
Normal file
37
backend/middleware/featureActive.middleware.ts
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
import { and, eq } from "drizzle-orm";
|
||||||
|
import type { NextFunction, Request, Response } from "express";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { settings } from "../db/schema/settings.schema.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* @param moduleName name of the module we are checking if is enabled or not.
|
||||||
|
*/
|
||||||
|
export const featureCheck = (moduleName: string) => {
|
||||||
|
// get the features from the settings
|
||||||
|
|
||||||
|
return async (_req: Request, res: Response, next: NextFunction) => {
|
||||||
|
const { data: sData, error: sError } = await tryCatch(
|
||||||
|
db
|
||||||
|
.select()
|
||||||
|
.from(settings)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(settings.settingType, "feature"),
|
||||||
|
eq(settings.name, moduleName),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (sError) {
|
||||||
|
return res.status(403).json({ error: "Internal Error" });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!sData?.length || !sData[0]?.active) {
|
||||||
|
return res.status(403).json({ error: "Feature disabled" });
|
||||||
|
}
|
||||||
|
|
||||||
|
next();
|
||||||
|
};
|
||||||
|
};
|
||||||
153
backend/notification/notification.controller.ts
Normal file
153
backend/notification/notification.controller.ts
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
import { eq } from "drizzle-orm";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { notifications } from "../db/schema/notifications.schema.js";
|
||||||
|
import { notificationSub } from "../db/schema/notifications.sub.schema.js";
|
||||||
|
import { createLogger } from "../logger/logger.controller.js";
|
||||||
|
import { minutesToCron } from "../utils/croner.minConvert.js";
|
||||||
|
import { createCronJob, stopCronJob } from "../utils/croner.utils.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
|
||||||
|
const log = createLogger({ module: "notifications", subModule: "start" });
|
||||||
|
|
||||||
|
export const startNotifications = async () => {
|
||||||
|
// get active notification
|
||||||
|
|
||||||
|
const { data, error } = await tryCatch(
|
||||||
|
db.select().from(notifications).where(eq(notifications.active, true)),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
log.error(
|
||||||
|
{ error: error },
|
||||||
|
"There was an error when getting notifications.",
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data) {
|
||||||
|
if (data.length === 0) {
|
||||||
|
log.info(
|
||||||
|
{},
|
||||||
|
"There are know currently active notifications to start up.",
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// get the subs and see if we have any subs currently so we can fire up the notification
|
||||||
|
const { data: sub, error: subError } = await tryCatch(
|
||||||
|
db.select().from(notificationSub),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (subError) {
|
||||||
|
log.error(
|
||||||
|
{ error: error },
|
||||||
|
"There was an error when getting subscriptions.",
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (sub.length === 0) {
|
||||||
|
log.info({}, "There are know currently active subscriptions.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const emailString = [
|
||||||
|
...new Set(
|
||||||
|
sub.flatMap((e) =>
|
||||||
|
e.emails?.map((email) => email.trim().toLowerCase()),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
].join(";");
|
||||||
|
|
||||||
|
for (const n of data) {
|
||||||
|
createCronJob(
|
||||||
|
n.name,
|
||||||
|
minutesToCron(parseInt(n.interval ?? "15", 10)),
|
||||||
|
async () => {
|
||||||
|
try {
|
||||||
|
const { default: runFun } = await import(
|
||||||
|
`./notification.${n.name.trim()}.js`
|
||||||
|
);
|
||||||
|
await runFun(n, emailString);
|
||||||
|
} catch (error) {
|
||||||
|
log.error(
|
||||||
|
{ error: error },
|
||||||
|
"There was an error starting the notification",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export const modifiedNotification = async (id: string) => {
|
||||||
|
// when a notifications subscribed to, updated, deleted we want to get the info and rerun the startup on the single notification.
|
||||||
|
const { data, error } = await tryCatch(
|
||||||
|
db.select().from(notifications).where(eq(notifications.id, id)),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
log.error(
|
||||||
|
{ error: error },
|
||||||
|
"There was an error when getting notifications.",
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data) {
|
||||||
|
if (!data[0]?.active) {
|
||||||
|
stopCronJob(data[0]?.name ?? "");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// get the subs for the specific id as we only want to up the modified one
|
||||||
|
const { data: sub, error: subError } = await tryCatch(
|
||||||
|
db
|
||||||
|
.select()
|
||||||
|
.from(notificationSub)
|
||||||
|
.where(eq(notificationSub.notificationId, id)),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (subError) {
|
||||||
|
log.error(
|
||||||
|
{ error: error },
|
||||||
|
"There was an error when getting subscriptions.",
|
||||||
|
);
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (sub.length === 0) {
|
||||||
|
log.info({}, "There are know currently active subscriptions.");
|
||||||
|
stopCronJob(data[0]?.name ?? "");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const emailString = [
|
||||||
|
...new Set(
|
||||||
|
sub.flatMap((e) =>
|
||||||
|
e.emails?.map((email) => email.trim().toLowerCase()),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
].join(";");
|
||||||
|
|
||||||
|
createCronJob(
|
||||||
|
data[0].name,
|
||||||
|
minutesToCron(parseInt(data[0].interval ?? "15", 10)),
|
||||||
|
async () => {
|
||||||
|
try {
|
||||||
|
const { default: runFun } = await import(
|
||||||
|
`./notification.${data[0]?.name.trim()}.js`
|
||||||
|
);
|
||||||
|
await runFun(data[0], emailString);
|
||||||
|
} catch (error) {
|
||||||
|
log.error(
|
||||||
|
{ error: error },
|
||||||
|
"There was an error starting the notification",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
10
backend/notification/notification.reprintLabels.ts
Normal file
10
backend/notification/notification.reprintLabels.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
const reprint = (data: any, emails: string) => {
|
||||||
|
// TODO: do the actual logic for the notification.
|
||||||
|
console.log(data);
|
||||||
|
console.log(emails);
|
||||||
|
|
||||||
|
// TODO send the error to systemAdmin users so they do not always need to be on the notifications.
|
||||||
|
// these errors are defined per notification.
|
||||||
|
};
|
||||||
|
|
||||||
|
export default reprint;
|
||||||
55
backend/notification/notification.route.ts
Normal file
55
backend/notification/notification.route.ts
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
import { eq } from "drizzle-orm";
|
||||||
|
import { type Response, Router } from "express";
|
||||||
|
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { notifications } from "../db/schema/notifications.schema.js";
|
||||||
|
import { auth } from "../utils/auth.utils.js";
|
||||||
|
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
|
||||||
|
const r = Router();
|
||||||
|
|
||||||
|
r.get("/", async (req, res: Response) => {
|
||||||
|
const hasPermissions = await auth.api.userHasPermission({
|
||||||
|
body: {
|
||||||
|
//userId: req?.user?.id,
|
||||||
|
role: req.user?.roles as any,
|
||||||
|
permissions: {
|
||||||
|
notifications: ["readAll"], // This must match the structure in your access control
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const { data: nName, error: nError } = await tryCatch(
|
||||||
|
db
|
||||||
|
.select()
|
||||||
|
.from(notifications)
|
||||||
|
.where(
|
||||||
|
!hasPermissions.success ? eq(notifications.active, true) : undefined,
|
||||||
|
)
|
||||||
|
.orderBy(notifications.name),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (nError) {
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "get",
|
||||||
|
message: `There was an error getting the notifications `,
|
||||||
|
data: [nError],
|
||||||
|
status: 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: true,
|
||||||
|
level: "info",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "get",
|
||||||
|
message: `All current notifications`,
|
||||||
|
data: nName ?? [],
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
export default r;
|
||||||
20
backend/notification/notification.routes.ts
Normal file
20
backend/notification/notification.routes.ts
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import type { Express } from "express";
|
||||||
|
import { requireAuth } from "../middleware/auth.middleware.js";
|
||||||
|
import getNotifications from "./notification.route.js";
|
||||||
|
import updateNote from "./notification.update.route.js";
|
||||||
|
import deleteSub from "./notificationSub.delete.route.js";
|
||||||
|
import subs from "./notificationSub.get.route.js";
|
||||||
|
import newSub from "./notificationSub.post.route.js";
|
||||||
|
import updateSub from "./notificationSub.update.route.js";
|
||||||
|
|
||||||
|
export const setupNotificationRoutes = (baseUrl: string, app: Express) => {
|
||||||
|
//stats will be like this as we dont need to change this
|
||||||
|
app.use(`${baseUrl}/api/notification`, requireAuth, getNotifications);
|
||||||
|
app.use(`${baseUrl}/api/notification`, requireAuth, updateNote);
|
||||||
|
app.use(`${baseUrl}/api/notification/sub`, requireAuth, subs);
|
||||||
|
app.use(`${baseUrl}/api/notification/sub`, requireAuth, newSub);
|
||||||
|
app.use(`${baseUrl}/api/notification/sub`, requireAuth, updateSub);
|
||||||
|
app.use(`${baseUrl}/api/notification/sub`, requireAuth, deleteSub);
|
||||||
|
|
||||||
|
// all other system should be under /api/system/*
|
||||||
|
};
|
||||||
81
backend/notification/notification.update.route.ts
Normal file
81
backend/notification/notification.update.route.ts
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
import { eq } from "drizzle-orm";
|
||||||
|
import { type Response, Router } from "express";
|
||||||
|
import z from "zod";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { notifications } from "../db/schema/notifications.schema.js";
|
||||||
|
import { requirePermission } from "../middleware/auth.requiredPerms.middleware.js";
|
||||||
|
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
import { modifiedNotification } from "./notification.controller.js";
|
||||||
|
|
||||||
|
const r = Router();
|
||||||
|
|
||||||
|
const updateNote = z.object({
|
||||||
|
description: z.string().optional(),
|
||||||
|
active: z.boolean().optional(),
|
||||||
|
interval: z.string().optional(),
|
||||||
|
options: z.array(z.record(z.string(), z.unknown())).optional(),
|
||||||
|
});
|
||||||
|
|
||||||
|
r.patch(
|
||||||
|
"/:id",
|
||||||
|
requirePermission({ notifications: ["update"] }),
|
||||||
|
async (req, res: Response) => {
|
||||||
|
const { id } = req.params;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const validated = updateNote.parse(req.body);
|
||||||
|
|
||||||
|
const { data: nName, error: nError } = await tryCatch(
|
||||||
|
db
|
||||||
|
.update(notifications)
|
||||||
|
.set(validated)
|
||||||
|
.where(eq(notifications.id, id as string))
|
||||||
|
.returning(),
|
||||||
|
);
|
||||||
|
|
||||||
|
await modifiedNotification(id as string);
|
||||||
|
|
||||||
|
if (nError) {
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "update",
|
||||||
|
message: `There was an error getting the notifications `,
|
||||||
|
data: [nError],
|
||||||
|
status: 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: true,
|
||||||
|
level: "info",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "update",
|
||||||
|
message: `Notification was updated`,
|
||||||
|
data: nName ?? [],
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof z.ZodError) {
|
||||||
|
const flattened = z.flattenError(err);
|
||||||
|
// return res.status(400).json({
|
||||||
|
// error: "Validation failed",
|
||||||
|
// details: flattened,
|
||||||
|
// });
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error", //connect.success ? "info" : "error",
|
||||||
|
module: "routes",
|
||||||
|
subModule: "notification",
|
||||||
|
message: "Validation failed",
|
||||||
|
data: [flattened.fieldErrors],
|
||||||
|
status: 400, //connect.success ? 200 : 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
export default r;
|
||||||
76
backend/notification/notificationSub.delete.route.ts
Normal file
76
backend/notification/notificationSub.delete.route.ts
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
import { and, eq } from "drizzle-orm";
|
||||||
|
import { type Response, Router } from "express";
|
||||||
|
import z from "zod";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { notificationSub } from "../db/schema/notifications.sub.schema.js";
|
||||||
|
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
import { modifiedNotification } from "./notification.controller.js";
|
||||||
|
|
||||||
|
const newSubscribe = z.object({
|
||||||
|
emails: z.email().array().describe("An array of emails"),
|
||||||
|
userId: z.string().describe("User id."),
|
||||||
|
notificationId: z.string().describe("Notification id"),
|
||||||
|
});
|
||||||
|
|
||||||
|
const r = Router();
|
||||||
|
|
||||||
|
r.delete("/", async (req, res: Response) => {
|
||||||
|
try {
|
||||||
|
const validated = newSubscribe.parse(req.body);
|
||||||
|
const { data, error } = await tryCatch(
|
||||||
|
db
|
||||||
|
.delete(notificationSub)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(notificationSub.userId, validated.userId),
|
||||||
|
eq(notificationSub.notificationId, validated.notificationId),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.returning(),
|
||||||
|
);
|
||||||
|
|
||||||
|
await modifiedNotification(validated.notificationId);
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "post",
|
||||||
|
message: `There was an error deleting the subscription `,
|
||||||
|
data: [error],
|
||||||
|
status: 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: true,
|
||||||
|
level: "info",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "post",
|
||||||
|
message: `Subscription deleted`,
|
||||||
|
data: data ?? [],
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof z.ZodError) {
|
||||||
|
const flattened = z.flattenError(err);
|
||||||
|
// return res.status(400).json({
|
||||||
|
// error: "Validation failed",
|
||||||
|
// details: flattened,
|
||||||
|
// });
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error", //connect.success ? "info" : "error",
|
||||||
|
module: "routes",
|
||||||
|
subModule: "notification",
|
||||||
|
message: "Validation failed",
|
||||||
|
data: [flattened.fieldErrors],
|
||||||
|
status: 400, //connect.success ? 200 : 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
export default r;
|
||||||
57
backend/notification/notificationSub.get.route.ts
Normal file
57
backend/notification/notificationSub.get.route.ts
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
import { eq } from "drizzle-orm";
|
||||||
|
import { type Response, Router } from "express";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { notificationSub } from "../db/schema/notifications.sub.schema.js";
|
||||||
|
import { auth } from "../utils/auth.utils.js";
|
||||||
|
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
|
||||||
|
const r = Router();
|
||||||
|
|
||||||
|
r.get("/", async (req, res: Response) => {
|
||||||
|
const { userId } = req.query;
|
||||||
|
|
||||||
|
const hasPermissions = await auth.api.userHasPermission({
|
||||||
|
body: {
|
||||||
|
//userId: req?.user?.id,
|
||||||
|
role: req.user?.roles as any,
|
||||||
|
permissions: {
|
||||||
|
notifications: ["readAll"], // This must match the structure in your access control
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const { data, error } = await tryCatch(
|
||||||
|
db
|
||||||
|
.select()
|
||||||
|
.from(notificationSub)
|
||||||
|
.where(
|
||||||
|
userId || !hasPermissions.success
|
||||||
|
? eq(notificationSub.userId, `${req?.user?.id ?? ""}`)
|
||||||
|
: undefined,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "post",
|
||||||
|
message: `There was an error getting subscriptions `,
|
||||||
|
data: [error],
|
||||||
|
status: 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: true,
|
||||||
|
level: "info",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "post",
|
||||||
|
message: `Subscriptions`,
|
||||||
|
data: data ?? [],
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
export default r;
|
||||||
75
backend/notification/notificationSub.post.route.ts
Normal file
75
backend/notification/notificationSub.post.route.ts
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
import { type Response, Router } from "express";
|
||||||
|
import z from "zod";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { notificationSub } from "../db/schema/notifications.sub.schema.js";
|
||||||
|
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
import { modifiedNotification } from "./notification.controller.js";
|
||||||
|
|
||||||
|
const newSubscribe = z.object({
|
||||||
|
emails: z
|
||||||
|
.email()
|
||||||
|
.array()
|
||||||
|
|
||||||
|
.describe("An array of emails"),
|
||||||
|
userId: z.string().describe("User id."),
|
||||||
|
notificationId: z
|
||||||
|
.string()
|
||||||
|
|
||||||
|
.describe("Notification id"),
|
||||||
|
});
|
||||||
|
|
||||||
|
const r = Router();
|
||||||
|
|
||||||
|
r.post("/", async (req, res: Response) => {
|
||||||
|
try {
|
||||||
|
const validated = newSubscribe.parse(req.body);
|
||||||
|
|
||||||
|
const { data, error } = await tryCatch(
|
||||||
|
db.insert(notificationSub).values(validated).returning(),
|
||||||
|
);
|
||||||
|
|
||||||
|
await modifiedNotification(validated.notificationId);
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "post",
|
||||||
|
message: `There was an error getting the notifications `,
|
||||||
|
data: [error],
|
||||||
|
status: 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: true,
|
||||||
|
level: "info",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "post",
|
||||||
|
message: `Subscribed to notification`,
|
||||||
|
data: data ?? [],
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof z.ZodError) {
|
||||||
|
const flattened = z.flattenError(err);
|
||||||
|
// return res.status(400).json({
|
||||||
|
// error: "Validation failed",
|
||||||
|
// details: flattened,
|
||||||
|
// });
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error", //connect.success ? "info" : "error",
|
||||||
|
module: "routes",
|
||||||
|
subModule: "notification",
|
||||||
|
message: "Validation failed",
|
||||||
|
data: [flattened.fieldErrors],
|
||||||
|
status: 400, //connect.success ? 200 : 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
export default r;
|
||||||
84
backend/notification/notificationSub.update.route.ts
Normal file
84
backend/notification/notificationSub.update.route.ts
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
import { and, eq } from "drizzle-orm";
|
||||||
|
import { type Response, Router } from "express";
|
||||||
|
import z from "zod";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { notificationSub } from "../db/schema/notifications.sub.schema.js";
|
||||||
|
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
import { modifiedNotification } from "./notification.controller.js";
|
||||||
|
|
||||||
|
const newSubscribe = z.object({
|
||||||
|
emails: z.email().array().describe("An array of emails"),
|
||||||
|
userId: z.string().describe("User id."),
|
||||||
|
notificationId: z.string().describe("Notification id"),
|
||||||
|
});
|
||||||
|
|
||||||
|
const r = Router();
|
||||||
|
|
||||||
|
r.patch("/", async (req, res: Response) => {
|
||||||
|
try {
|
||||||
|
const validated = newSubscribe.parse(req.body);
|
||||||
|
|
||||||
|
const emails = validated.emails
|
||||||
|
.map((e) => e.trim().toLowerCase())
|
||||||
|
.filter(Boolean);
|
||||||
|
|
||||||
|
const uniqueEmails = [...new Set(emails)];
|
||||||
|
|
||||||
|
const { data, error } = await tryCatch(
|
||||||
|
db
|
||||||
|
.update(notificationSub)
|
||||||
|
.set({ emails: uniqueEmails })
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(notificationSub.userId, validated.userId),
|
||||||
|
eq(notificationSub.notificationId, validated.notificationId),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.returning(),
|
||||||
|
);
|
||||||
|
|
||||||
|
await modifiedNotification(validated.notificationId);
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "update",
|
||||||
|
message: `There was an error updating the notifications `,
|
||||||
|
data: [error],
|
||||||
|
status: 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: true,
|
||||||
|
level: "info",
|
||||||
|
module: "notification",
|
||||||
|
subModule: "update",
|
||||||
|
message: `Subscription updated`,
|
||||||
|
data: data ?? [],
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof z.ZodError) {
|
||||||
|
const flattened = z.flattenError(err);
|
||||||
|
// return res.status(400).json({
|
||||||
|
// error: "Validation failed",
|
||||||
|
// details: flattened,
|
||||||
|
// });
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error", //connect.success ? "info" : "error",
|
||||||
|
module: "routes",
|
||||||
|
subModule: "notification",
|
||||||
|
message: "Validation failed",
|
||||||
|
data: [flattened.fieldErrors],
|
||||||
|
status: 400, //connect.success ? 200 : 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
export default r;
|
||||||
50
backend/notification/notifications.master.ts
Normal file
50
backend/notification/notifications.master.ts
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
import { sql } from "drizzle-orm";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import {
|
||||||
|
type NewNotification,
|
||||||
|
notifications,
|
||||||
|
} from "../db/schema/notifications.schema.js";
|
||||||
|
import { createLogger } from "../logger/logger.controller.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
|
||||||
|
const note: NewNotification[] = [
|
||||||
|
{
|
||||||
|
name: "reprintLabels",
|
||||||
|
description:
|
||||||
|
"Monitors the labels that are printed and returns a there data, if one falls withing the time frame.",
|
||||||
|
active: false,
|
||||||
|
interval: "10",
|
||||||
|
options: [{ prodID: 1 }],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
export const createNotifications = async () => {
|
||||||
|
const log = createLogger({ module: "notifications", subModule: "create" });
|
||||||
|
const { data, error } = await tryCatch(
|
||||||
|
db
|
||||||
|
.insert(notifications)
|
||||||
|
.values(note)
|
||||||
|
.onConflictDoUpdate({
|
||||||
|
target: notifications.name,
|
||||||
|
set: {
|
||||||
|
description: sql`excluded.description`,
|
||||||
|
},
|
||||||
|
// where: sql`
|
||||||
|
// settings.seed_version IS NULL
|
||||||
|
// OR settings.seed_version < excluded.seed_version
|
||||||
|
// `,
|
||||||
|
})
|
||||||
|
.returning(),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
log.error(
|
||||||
|
{ error: error },
|
||||||
|
"There was an error when adding or updating the notifications.",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data) {
|
||||||
|
log.info({}, "All notifications were added/updated");
|
||||||
|
}
|
||||||
|
};
|
||||||
36
backend/ocp/ocp.printer.listener.ts
Normal file
36
backend/ocp/ocp.printer.listener.ts
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
/**
|
||||||
|
* the route that listens for the printers post.
|
||||||
|
*
|
||||||
|
* and http-post alert should be setup on each printer pointing to at min you will want to make the alert for
|
||||||
|
* pause printer, you can have all on here as it will also monitor and do things on all messages
|
||||||
|
*
|
||||||
|
* http://{serverIP}:2222/lst/api/ocp/printer/listener/{printerName}
|
||||||
|
*
|
||||||
|
* the messages will be sent over to the db for logging as well as specific ones will do something
|
||||||
|
*
|
||||||
|
* pause will validate if can print
|
||||||
|
* close head will repause the printer so it wont print a label
|
||||||
|
* power up will just repause the printer so it wont print a label
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Router } from "express";
|
||||||
|
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||||
|
|
||||||
|
const r = Router();
|
||||||
|
|
||||||
|
r.post("/printer/listener/:printer", async (req, res) => {
|
||||||
|
const { printer: printerName } = req.params;
|
||||||
|
console.log(req.body);
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: true,
|
||||||
|
level: "info",
|
||||||
|
module: "ocp",
|
||||||
|
subModule: "printing",
|
||||||
|
message: `${printerName} just passed over a message`,
|
||||||
|
data: req.body ?? [],
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
export default r;
|
||||||
19
backend/ocp/ocp.printer.manage.ts
Normal file
19
backend/ocp/ocp.printer.manage.ts
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
/**
|
||||||
|
* this will do a prod sync, update or add alerts to the printer, validate the next pm intervale as well as head replacement.
|
||||||
|
*
|
||||||
|
* if a printer is upcoming on a pm or head replacement send to the plant to address.
|
||||||
|
*
|
||||||
|
* a trigger on the printer table will have the ability to run this as well
|
||||||
|
*
|
||||||
|
* heat beats on all assigned printers
|
||||||
|
*
|
||||||
|
* printer status will live here this will be how we manage all the levels of status like 3 paused, 1 printing, 8 error, 10 power up, etc...
|
||||||
|
*/
|
||||||
|
|
||||||
|
export const printerManager = async () => {};
|
||||||
|
|
||||||
|
export const printerHeartBeat = async () => {
|
||||||
|
// heat heats will be defaulted to 60 seconds no reason to allow anything else
|
||||||
|
};
|
||||||
|
|
||||||
|
//export const printerStatus = async (statusNr: number, printerId: number) => {};
|
||||||
22
backend/ocp/ocp.routes.ts
Normal file
22
backend/ocp/ocp.routes.ts
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import { type Express, Router } from "express";
|
||||||
|
import { requireAuth } from "../middleware/auth.middleware.js";
|
||||||
|
import { featureCheck } from "../middleware/featureActive.middleware.js";
|
||||||
|
import listener from "./ocp.printer.listener.js";
|
||||||
|
|
||||||
|
export const setupOCPRoutes = (baseUrl: string, app: Express) => {
|
||||||
|
//setup all the routes
|
||||||
|
const router = Router();
|
||||||
|
|
||||||
|
// is the feature even on?
|
||||||
|
router.use(featureCheck("ocp"));
|
||||||
|
|
||||||
|
// non auth routes up here
|
||||||
|
router.use(listener);
|
||||||
|
|
||||||
|
// auth routes below here
|
||||||
|
router.use(requireAuth);
|
||||||
|
|
||||||
|
//router.use("");
|
||||||
|
|
||||||
|
app.use(`${baseUrl}/api/ocp`, router);
|
||||||
|
};
|
||||||
481
backend/opendock/openDockRreleaseMonitor.utils.ts
Normal file
481
backend/opendock/openDockRreleaseMonitor.utils.ts
Normal file
@@ -0,0 +1,481 @@
|
|||||||
|
import axios from "axios";
|
||||||
|
import { addHours } from "date-fns";
|
||||||
|
import { formatInTimeZone } from "date-fns-tz";
|
||||||
|
import { eq, sql } from "drizzle-orm";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { opendockApt } from "../db/schema/opendock.schema.js";
|
||||||
|
import { settings } from "../db/schema/settings.schema.js";
|
||||||
|
import { createLogger } from "../logger/logger.controller.js";
|
||||||
|
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
|
||||||
|
import {
|
||||||
|
type SqlQuery,
|
||||||
|
sqlQuerySelector,
|
||||||
|
} from "../prodSql/prodSqlQuerySelector.utils.js";
|
||||||
|
import { createCronJob } from "../utils/croner.utils.js";
|
||||||
|
import { delay } from "../utils/delay.utils.js";
|
||||||
|
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
import { getToken, odToken } from "./opendock.utils.js";
|
||||||
|
|
||||||
|
let lastCheck = formatInTimeZone(
|
||||||
|
new Date().toISOString(),
|
||||||
|
"America/New_York",
|
||||||
|
"yyyy-MM-dd HH:mm:ss",
|
||||||
|
);
|
||||||
|
|
||||||
|
//const queue: unknown[] = [];
|
||||||
|
//const isProcessing: boolean = false;
|
||||||
|
|
||||||
|
type Releases = {
|
||||||
|
ReleaseNumber: number;
|
||||||
|
DeliveryState: number;
|
||||||
|
DeliveryDate: Date;
|
||||||
|
LineItemHumanReadableId: number;
|
||||||
|
ArticleAlias: string;
|
||||||
|
LoadingUnits: string;
|
||||||
|
Quantity: number;
|
||||||
|
LineItemArticleWeight: number;
|
||||||
|
CustomerReleaseNumber: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
const TWENTY_FOUR_HOURS = 24 * 60 * 60 * 1000;
|
||||||
|
const log = createLogger({ module: "opendock", subModule: "releaseMonitor" });
|
||||||
|
|
||||||
|
const postRelease = async (release: Releases) => {
|
||||||
|
if (!odToken.odToken) {
|
||||||
|
log.info({}, "Getting Auth Token");
|
||||||
|
await getToken();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
new Date(odToken.tokenDate || Date.now()).getTime() <
|
||||||
|
Date.now() - TWENTY_FOUR_HOURS
|
||||||
|
) {
|
||||||
|
log.info({}, "Refreshing Auth Token");
|
||||||
|
await getToken();
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* ReleaseState
|
||||||
|
* 0 = open
|
||||||
|
* 1 = planned
|
||||||
|
* 2 = CustomCanceled
|
||||||
|
* 4 = internally canceled
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DeliveryState
|
||||||
|
* 0 = open
|
||||||
|
* 1 = inprogress
|
||||||
|
* 2 = loading
|
||||||
|
* 3 = partly shipped
|
||||||
|
* 4 = delivered
|
||||||
|
*/
|
||||||
|
|
||||||
|
const newDockApt = {
|
||||||
|
status:
|
||||||
|
release.DeliveryState === 0 || release.DeliveryState === 1
|
||||||
|
? "Scheduled"
|
||||||
|
: release.DeliveryState === 2
|
||||||
|
? "InProgress"
|
||||||
|
: release.DeliveryState === 3 // this will consider finished and if a correction needs made to the bol we need to cancel and reactivate the order
|
||||||
|
? "Completed"
|
||||||
|
: release.DeliveryState === 4 && "Completed",
|
||||||
|
userId: process.env.DEFAULT_CARRIER, // this should be the carrierid
|
||||||
|
loadTypeId: process.env.DEFAULT_LOAD_TYPE, // well get this and make it a default one
|
||||||
|
dockId: process.env.DEFAULT_DOCK, // this the warehouse we want it in to start out
|
||||||
|
refNumbers: [release.ReleaseNumber],
|
||||||
|
//refNumber: release.ReleaseNumber,
|
||||||
|
start: release.DeliveryDate,
|
||||||
|
end: addHours(release.DeliveryDate, 1),
|
||||||
|
notes: "",
|
||||||
|
ccEmails: [""],
|
||||||
|
muteNotifications: true,
|
||||||
|
metadata: {
|
||||||
|
externalValidationFailed: false,
|
||||||
|
externalValidationErrorMessage: null,
|
||||||
|
},
|
||||||
|
units: null,
|
||||||
|
customFields: [
|
||||||
|
{
|
||||||
|
name: "strArticle",
|
||||||
|
type: "str",
|
||||||
|
label: "Article",
|
||||||
|
value: `${release.LineItemHumanReadableId} - ${release.ArticleAlias}`,
|
||||||
|
description: "What bottle are we sending ",
|
||||||
|
placeholder: "",
|
||||||
|
dropDownValues: [],
|
||||||
|
minLengthOrValue: 1,
|
||||||
|
hiddenFromCarrier: false,
|
||||||
|
requiredForCarrier: false,
|
||||||
|
requiredForWarehouse: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "intPallet Count",
|
||||||
|
type: "int",
|
||||||
|
label: "Pallet Count",
|
||||||
|
value: parseInt(release.LoadingUnits, 10), // do we really want to update this if its partial load as it should have been the full amount?
|
||||||
|
description: "How many pallets",
|
||||||
|
placeholder: "22",
|
||||||
|
dropDownValues: [],
|
||||||
|
minLengthOrValue: 1,
|
||||||
|
hiddenFromCarrier: false,
|
||||||
|
requiredForCarrier: false,
|
||||||
|
requiredForWarehouse: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "strTotal Weight",
|
||||||
|
type: "str",
|
||||||
|
label: "Total Weight",
|
||||||
|
value: `${(((release.Quantity * release.LineItemArticleWeight) / 1000) * 2.20462).toFixed(2)}`,
|
||||||
|
description: "What is the total weight of the load",
|
||||||
|
placeholder: "",
|
||||||
|
dropDownValues: [],
|
||||||
|
minLengthOrValue: 1,
|
||||||
|
hiddenFromCarrier: false,
|
||||||
|
requiredForCarrier: false,
|
||||||
|
requiredForWarehouse: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "strCustomer ReleaseNumber",
|
||||||
|
type: "str",
|
||||||
|
label: "Customer Release Number",
|
||||||
|
value: `${release.CustomerReleaseNumber}`,
|
||||||
|
description: "What is the customer release number",
|
||||||
|
placeholder: "",
|
||||||
|
dropDownValues: [],
|
||||||
|
minLengthOrValue: 1,
|
||||||
|
hiddenFromCarrier: false,
|
||||||
|
requiredForCarrier: false,
|
||||||
|
requiredForWarehouse: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
// TODO: pull the current added releases from the db and if one matches then we want to get its id and run the update vs create
|
||||||
|
const { data: apt, error: aptError } = await tryCatch(
|
||||||
|
db.select().from(opendockApt),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (aptError) {
|
||||||
|
log.error({ error: aptError }, "Error getting apt data");
|
||||||
|
// TODO: send an error email on this one as it will cause issues
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const releaseCheck = apt.filter((r) => r.release === release.ReleaseNumber);
|
||||||
|
|
||||||
|
//console.log(releaseCheck);
|
||||||
|
|
||||||
|
if (releaseCheck.length > 0) {
|
||||||
|
const id = releaseCheck[0]?.openDockAptId;
|
||||||
|
try {
|
||||||
|
const response = await axios.patch(
|
||||||
|
`${process.env.OPENDOCK_URL}/appointment/${id}`,
|
||||||
|
newDockApt,
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
"content-type": "application/json; charset=utf-8",
|
||||||
|
Authorization: `Bearer ${odToken.odToken}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
if (response.status === 400) {
|
||||||
|
log.error({}, response.data.data.message);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// update the release in the db leaving as insert just incase something weird happened
|
||||||
|
try {
|
||||||
|
await db
|
||||||
|
.insert(opendockApt)
|
||||||
|
.values({
|
||||||
|
release: release.ReleaseNumber,
|
||||||
|
openDockAptId: response.data.data.id,
|
||||||
|
appointment: response.data.data,
|
||||||
|
})
|
||||||
|
.onConflictDoUpdate({
|
||||||
|
target: opendockApt.release,
|
||||||
|
set: { appointment: response.data.data, upd_date: sql`NOW()` },
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
log.info({}, `${release.ReleaseNumber} was updated`);
|
||||||
|
} catch (e) {
|
||||||
|
log.error(
|
||||||
|
{ error: e },
|
||||||
|
`Error updating the release: ${release.ReleaseNumber}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// biome-ignore lint/suspicious/noExplicitAny: to many possibilities
|
||||||
|
} catch (e: any) {
|
||||||
|
//console.info(newDockApt);
|
||||||
|
log.error(
|
||||||
|
{ error: e.response.data },
|
||||||
|
`An error has occurred during patching of the release: ${release.ReleaseNumber}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
try {
|
||||||
|
const response = await axios.post(
|
||||||
|
`${process.env.OPENDOCK_URL}/appointment`,
|
||||||
|
newDockApt,
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
"content-type": "application/json; charset=utf-8",
|
||||||
|
Authorization: `Bearer ${odToken.odToken}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// we need the id,release#,status from this response, store it in lst, check if we have a release so we can just update it.
|
||||||
|
// this will be utilized when we are listening for the changes to the apts. that way we can update the state to arrived. we will run our own checks on this guy during the incoming messages.
|
||||||
|
|
||||||
|
if (response.status === 400) {
|
||||||
|
log.error({}, response.data.data.message);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// the response to make it simple we want response.data.id, response.data.relNumber, status will be defaulted to Scheduled if we created it here.
|
||||||
|
// TODO: add this release data to our db. but save it in json format and well parse it out. that way we future proof it and have everything in here vs just a few things
|
||||||
|
//console.info(response.data.data, "Was Created");
|
||||||
|
try {
|
||||||
|
await db
|
||||||
|
.insert(opendockApt)
|
||||||
|
.values({
|
||||||
|
release: release.ReleaseNumber,
|
||||||
|
openDockAptId: response.data.data.id,
|
||||||
|
appointment: response.data.data,
|
||||||
|
})
|
||||||
|
.onConflictDoUpdate({
|
||||||
|
target: opendockApt.id,
|
||||||
|
set: { appointment: response.data.data, upd_date: sql`NOW()` },
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
log.info({}, `${release.ReleaseNumber} was created`);
|
||||||
|
} catch (e) {
|
||||||
|
log.error({ error: e }, "Error creating new release");
|
||||||
|
}
|
||||||
|
// biome-ignore lint/suspicious/noExplicitAny: to many possibilities
|
||||||
|
} catch (e: any) {
|
||||||
|
log.error(
|
||||||
|
{ error: e?.response?.data },
|
||||||
|
"Error posting new release to opendock",
|
||||||
|
);
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await delay(500); // rate limit protection
|
||||||
|
};
|
||||||
|
|
||||||
|
export const monitorReleaseChanges = async () => {
|
||||||
|
// TODO: validate if the setting for opendocks is active and start / stop the system based on this
|
||||||
|
// if it changes we set to false and the next loop will stop.
|
||||||
|
|
||||||
|
const openDockMonitor = await db
|
||||||
|
.select()
|
||||||
|
.from(settings)
|
||||||
|
.where(eq(settings.name, "opendock_sync"));
|
||||||
|
// console.info("Starting release monitor", lastCheck);
|
||||||
|
|
||||||
|
const sqlQuery = sqlQuerySelector(`releaseChecks`) as SqlQuery;
|
||||||
|
|
||||||
|
if (!sqlQuery.success) {
|
||||||
|
return returnFunc({
|
||||||
|
success: false,
|
||||||
|
level: "error",
|
||||||
|
module: "datamart",
|
||||||
|
subModule: "query",
|
||||||
|
message: `Error getting releaseChecks info`,
|
||||||
|
data: [sqlQuery.message],
|
||||||
|
notify: false,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (openDockMonitor[0]?.active) {
|
||||||
|
createCronJob("opendock_sync", "*/15 * * * * *", async () => {
|
||||||
|
try {
|
||||||
|
const result = await prodQuery(
|
||||||
|
sqlQuery.query.replace("[dateCheck]", `'${lastCheck}'`),
|
||||||
|
"Get release info",
|
||||||
|
);
|
||||||
|
|
||||||
|
if (result.data.length) {
|
||||||
|
for (const release of result.data) {
|
||||||
|
await postRelease(release);
|
||||||
|
|
||||||
|
lastCheck = formatInTimeZone(
|
||||||
|
new Date(release.Upd_Date).toISOString(),
|
||||||
|
"UTC",
|
||||||
|
"yyyy-MM-dd HH:mm:ss",
|
||||||
|
);
|
||||||
|
|
||||||
|
await delay(500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
console.error(
|
||||||
|
{ error: e },
|
||||||
|
"Error occurred while running the monitor job",
|
||||||
|
);
|
||||||
|
log.error({ error: e }, "Error occurred while running the monitor job");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// run the main game loop
|
||||||
|
// while (openDockSetting) {
|
||||||
|
// try {
|
||||||
|
// const result = await prodQuery(
|
||||||
|
// sqlQuery.query.replace("[dateCheck]", `'${lastCheck}'`),
|
||||||
|
// "Get release info",
|
||||||
|
// );
|
||||||
|
|
||||||
|
// if (result.data.length) {
|
||||||
|
// for (const release of result.data) {
|
||||||
|
// // potentially move this to a buffer table to easy up on memory
|
||||||
|
// await postRelease(release);
|
||||||
|
|
||||||
|
// // Move checkpoint AFTER successful post
|
||||||
|
// lastCheck = formatInTimeZone(
|
||||||
|
// new Date(release.Upd_Date).toISOString(),
|
||||||
|
// "UTC",
|
||||||
|
// "yyyy-MM-dd HH:mm:ss",
|
||||||
|
// );
|
||||||
|
|
||||||
|
// await delay(500);
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// } catch (e) {
|
||||||
|
// console.error("Monitor error:", e);
|
||||||
|
// }
|
||||||
|
|
||||||
|
// await delay(15 * 1000); // making this 15 seconds as we would really only see issues if we have a mass burst.
|
||||||
|
// }
|
||||||
|
};
|
||||||
|
|
||||||
|
// export const monitorReleaseChanges = async () => {
|
||||||
|
// console.log("Starting release monitor", lastCheck);
|
||||||
|
// setInterval(async () => {
|
||||||
|
// try {
|
||||||
|
// const result = await prodQuery(
|
||||||
|
// releaseQuery.replace("[dateCheck]", `'${lastCheck}'`),
|
||||||
|
// "get last release change",
|
||||||
|
// );
|
||||||
|
|
||||||
|
// //console.log(releaseQuery.replace("[dateCheck]", `'${lastCheck}'`));
|
||||||
|
// if (result.data.length > 0) {
|
||||||
|
// console.log(
|
||||||
|
// formatInTimeZone(
|
||||||
|
// result.data[result.data.length - 1].Upd_Date,
|
||||||
|
// "UTC",
|
||||||
|
// "yyyy-MM-dd HH:mm:ss",
|
||||||
|
// ),
|
||||||
|
// lastCheck,
|
||||||
|
// );
|
||||||
|
// lastCheck = formatInTimeZone(
|
||||||
|
// result.data[result.data.length - 1].Upd_Date,
|
||||||
|
// "UTC",
|
||||||
|
// "yyyy-MM-dd HH:mm:ss",
|
||||||
|
// );
|
||||||
|
// const releases = result.data;
|
||||||
|
// for (let i = 0; i < releases.length; i++) {
|
||||||
|
// const newDockApt = {
|
||||||
|
// status: "Scheduled",
|
||||||
|
// userId: "ee956455-e193-47fc-b53b-dff30fabdf4b", // this should be the carrierid
|
||||||
|
// loadTypeId: "0aa7988e-b17b-4f10-acdd-3d029b44a773", // well get this and make it a default one
|
||||||
|
// dockId: "00ba4386-ce5a-4dd1-9356-6e6d10a24609", // this the warehouse we want it in to start out
|
||||||
|
// refNumbers: [releases[i].ReleaseNumber],
|
||||||
|
// refNumber: releases[i].ReleaseNumber,
|
||||||
|
// start: releases[i].DeliveryDate,
|
||||||
|
// end: addHours(releases[i].DeliveryDate, 1),
|
||||||
|
// notes: "",
|
||||||
|
// ccEmails: [""],
|
||||||
|
// muteNotifications: true,
|
||||||
|
// metadata: {
|
||||||
|
// externalValidationFailed: false,
|
||||||
|
// externalValidationErrorMessage: null,
|
||||||
|
// },
|
||||||
|
// units: null,
|
||||||
|
// customFields: [
|
||||||
|
// {
|
||||||
|
// name: "strArticle",
|
||||||
|
// type: "str",
|
||||||
|
// label: "Article",
|
||||||
|
// value: `${releases[i].LineItemHumanReadableId} - ${releases[i].ArticleAlias}`,
|
||||||
|
// description: "What bottle are we sending ",
|
||||||
|
// placeholder: "",
|
||||||
|
// dropDownValues: [],
|
||||||
|
// minLengthOrValue: 1,
|
||||||
|
// hiddenFromCarrier: false,
|
||||||
|
// requiredForCarrier: false,
|
||||||
|
// requiredForWarehouse: false,
|
||||||
|
// },
|
||||||
|
// {
|
||||||
|
// name: "intPallet Count",
|
||||||
|
// type: "int",
|
||||||
|
// label: "Pallet Count",
|
||||||
|
// value: parseInt(releases[i].LoadingUnits, 10),
|
||||||
|
// description: "How many pallets",
|
||||||
|
// placeholder: "22",
|
||||||
|
// dropDownValues: [],
|
||||||
|
// minLengthOrValue: 1,
|
||||||
|
// hiddenFromCarrier: false,
|
||||||
|
// requiredForCarrier: false,
|
||||||
|
// requiredForWarehouse: false,
|
||||||
|
// },
|
||||||
|
// {
|
||||||
|
// name: "strTotal Weight",
|
||||||
|
// type: "str",
|
||||||
|
// label: "Total Weight",
|
||||||
|
// value: `${(((releases[i].Quantity * releases[i].LineItemArticleWeight) / 1000) * 2.20462).toFixed(2)}`,
|
||||||
|
// description: "What is the total weight of the load",
|
||||||
|
// placeholder: "",
|
||||||
|
// dropDownValues: [],
|
||||||
|
// minLengthOrValue: 1,
|
||||||
|
// hiddenFromCarrier: false,
|
||||||
|
// requiredForCarrier: false,
|
||||||
|
// requiredForWarehouse: false,
|
||||||
|
// },
|
||||||
|
// {
|
||||||
|
// name: "strCustomer ReleaseNumber",
|
||||||
|
// type: "str",
|
||||||
|
// label: "Customer Release Number",
|
||||||
|
// value: `${releases[i].CustomerReleaseNumber}`,
|
||||||
|
// description: "What is the customer release number",
|
||||||
|
// placeholder: "",
|
||||||
|
// dropDownValues: [],
|
||||||
|
// minLengthOrValue: 1,
|
||||||
|
// hiddenFromCarrier: false,
|
||||||
|
// requiredForCarrier: false,
|
||||||
|
// requiredForWarehouse: false,
|
||||||
|
// },
|
||||||
|
// ],
|
||||||
|
// };
|
||||||
|
|
||||||
|
// //console.log(newDockApt);
|
||||||
|
|
||||||
|
// const newDockResult = await axios.post(
|
||||||
|
// "https://neutron.staging.opendock.com/appointment",
|
||||||
|
// newDockApt,
|
||||||
|
// {
|
||||||
|
// headers: {
|
||||||
|
// "content-type": "application/json; charset=utf-8",
|
||||||
|
// },
|
||||||
|
// },
|
||||||
|
// );
|
||||||
|
|
||||||
|
// console.log(newDockResult.statusText);
|
||||||
|
// await delay(500);
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// } catch (e) {
|
||||||
|
// console.log(e);
|
||||||
|
// }
|
||||||
|
// }, 5 * 1000);
|
||||||
|
// };
|
||||||
19
backend/opendock/opendock.routes.ts
Normal file
19
backend/opendock/opendock.routes.ts
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import { type Express, Router } from "express";
|
||||||
|
import { requireAuth } from "../middleware/auth.middleware.js";
|
||||||
|
import { featureCheck } from "../middleware/featureActive.middleware.js";
|
||||||
|
import getApt from "./opendockGetRelease.route.js";
|
||||||
|
|
||||||
|
export const setupOpendockRoutes = (baseUrl: string, app: Express) => {
|
||||||
|
//setup all the routes
|
||||||
|
// Apply auth to entire router
|
||||||
|
const router = Router();
|
||||||
|
|
||||||
|
// is the feature even on?
|
||||||
|
router.use(featureCheck("opendock_sync"));
|
||||||
|
|
||||||
|
// we need to make sure we are authenticated to see the releases
|
||||||
|
router.use(requireAuth);
|
||||||
|
|
||||||
|
router.use(getApt);
|
||||||
|
app.use(`${baseUrl}/api/opendock`, router);
|
||||||
|
};
|
||||||
35
backend/opendock/opendock.utils.ts
Normal file
35
backend/opendock/opendock.utils.ts
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
import axios from "axios";
|
||||||
|
import { createLogger } from "../logger/logger.controller.js";
|
||||||
|
|
||||||
|
type ODToken = {
|
||||||
|
odToken: string | null;
|
||||||
|
tokenDate: Date | null;
|
||||||
|
};
|
||||||
|
|
||||||
|
export let odToken: ODToken = {
|
||||||
|
odToken: null,
|
||||||
|
tokenDate: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
export const getToken = async () => {
|
||||||
|
const log = createLogger({ module: "opendock", subModule: "releaseMonitor" });
|
||||||
|
try {
|
||||||
|
const { status, data } = await axios.post(
|
||||||
|
`${process.env.OPENDOCK_URL}/auth/login`,
|
||||||
|
{
|
||||||
|
email: "blake.matthes@alpla.com",
|
||||||
|
password: process.env.OPENDOCK_PASSWORD,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
if (status === 400) {
|
||||||
|
log.error(data.message);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
odToken = { odToken: data.access_token, tokenDate: new Date() };
|
||||||
|
log.info({}, "Token added");
|
||||||
|
} catch (e) {
|
||||||
|
log.error({ error: e }, "Error getting/refreshing token");
|
||||||
|
}
|
||||||
|
};
|
||||||
40
backend/opendock/opendockGetRelease.route.ts
Normal file
40
backend/opendock/opendockGetRelease.route.ts
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
import { desc, gte, sql } from "drizzle-orm";
|
||||||
|
import { Router } from "express";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { opendockApt } from "../db/schema/opendock.schema.js";
|
||||||
|
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
|
||||||
|
const r = Router();
|
||||||
|
|
||||||
|
r.get("/", async (_, res) => {
|
||||||
|
//const limit
|
||||||
|
|
||||||
|
const daysCreated = 30;
|
||||||
|
|
||||||
|
const { data } = await tryCatch(
|
||||||
|
db
|
||||||
|
.select()
|
||||||
|
.from(opendockApt)
|
||||||
|
.where(
|
||||||
|
gte(
|
||||||
|
opendockApt.createdAt,
|
||||||
|
sql.raw(`NOW() - INTERVAL '${daysCreated} days'`),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.orderBy(desc(opendockApt.createdAt))
|
||||||
|
.limit(500),
|
||||||
|
);
|
||||||
|
|
||||||
|
apiReturn(res, {
|
||||||
|
success: true,
|
||||||
|
level: "info",
|
||||||
|
module: "opendock",
|
||||||
|
subModule: "apt",
|
||||||
|
message: `The first ${data?.length} Apt(s) that were created in the last ${daysCreated} `,
|
||||||
|
data: data ?? [],
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
export default r;
|
||||||
69
backend/opendock/opendockSocketMonitor.utils.ts
Normal file
69
backend/opendock/opendockSocketMonitor.utils.ts
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
import { io, type Socket } from "socket.io-client";
|
||||||
|
import { createLogger } from "../logger/logger.controller.js";
|
||||||
|
import { systemSettings } from "../server.js";
|
||||||
|
import { getToken, odToken } from "./opendock.utils.js";
|
||||||
|
|
||||||
|
const log = createLogger({ module: "opendock", subModule: "releaseMonitor" });
|
||||||
|
const TWENTY_FOUR_HOURS = 24 * 60 * 60 * 1000;
|
||||||
|
let socket: Socket | null = null;
|
||||||
|
export const opendockSocketMonitor = async () => {
|
||||||
|
// checking if we actaully want to run this
|
||||||
|
if (!systemSettings.filter((n) => n.name === "opendock_sync")[0]?.active) {
|
||||||
|
log.info({}, "Opendock is not active");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!odToken.odToken) {
|
||||||
|
log.info({}, "Getting Auth Token");
|
||||||
|
await getToken();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
new Date(odToken.tokenDate || Date.now()).getTime() <
|
||||||
|
Date.now() - TWENTY_FOUR_HOURS
|
||||||
|
) {
|
||||||
|
log.info({}, "Refreshing Auth Token");
|
||||||
|
await getToken();
|
||||||
|
}
|
||||||
|
const baseSubspaceUrl = "wss://subspace.staging.opendock.com";
|
||||||
|
const url = `${baseSubspaceUrl}?token=${odToken.odToken}`;
|
||||||
|
socket = io(url, { transports: ["websocket"] }); // Enforce 'websocket' transport only.
|
||||||
|
|
||||||
|
socket.on("connect", () => {
|
||||||
|
console.log("Connected");
|
||||||
|
});
|
||||||
|
|
||||||
|
// socket.on("heartbeat", (data) => {
|
||||||
|
// console.log(data);
|
||||||
|
// });
|
||||||
|
|
||||||
|
socket.on("create-Appointment", (data) => {
|
||||||
|
console.log("appt create:", data);
|
||||||
|
});
|
||||||
|
|
||||||
|
socket.on("update-Appointment", (data) => {
|
||||||
|
console.log("appt update:", data);
|
||||||
|
});
|
||||||
|
|
||||||
|
socket.on("error", (data) => {
|
||||||
|
console.log("Error:", data);
|
||||||
|
});
|
||||||
|
|
||||||
|
// socket.onAny((event, ...args) => {
|
||||||
|
// console.log("Received event:", event, args);
|
||||||
|
// });
|
||||||
|
};
|
||||||
|
|
||||||
|
export const killOpendockSocket = () => {
|
||||||
|
if (!socket) {
|
||||||
|
console.log("No active socket to kill");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log("🛑 Killing socket connection...");
|
||||||
|
|
||||||
|
socket.removeAllListeners(); // optional but clean
|
||||||
|
socket.disconnect();
|
||||||
|
socket = null;
|
||||||
|
|
||||||
|
console.log("✅ Socket killed");
|
||||||
|
};
|
||||||
17
backend/prodSql/prodSql.routes.ts
Normal file
17
backend/prodSql/prodSql.routes.ts
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
import { type Express, Router } from "express";
|
||||||
|
import { requireAuth } from "../middleware/auth.middleware.js";
|
||||||
|
import restart from "./prodSqlRestart.route.js";
|
||||||
|
import start from "./prodSqlStart.route.js";
|
||||||
|
import stop from "./prodSqlStop.route.js";
|
||||||
|
export const setupProdSqlRoutes = (baseUrl: string, app: Express) => {
|
||||||
|
//setup all the routes
|
||||||
|
// Apply auth to entire router
|
||||||
|
const router = Router();
|
||||||
|
router.use(requireAuth);
|
||||||
|
|
||||||
|
router.use(start);
|
||||||
|
router.use(stop);
|
||||||
|
router.use(restart);
|
||||||
|
|
||||||
|
app.use(`${baseUrl}/api/system/prodSql`, router);
|
||||||
|
};
|
||||||
@@ -35,7 +35,8 @@ export const connectProdSql = async () => {
|
|||||||
|
|
||||||
// try to connect to the sql server
|
// try to connect to the sql server
|
||||||
try {
|
try {
|
||||||
pool = await sql.connect(prodSqlConfig);
|
pool = new sql.ConnectionPool(prodSqlConfig);
|
||||||
|
await pool.connect();
|
||||||
connected = true;
|
connected = true;
|
||||||
return returnFunc({
|
return returnFunc({
|
||||||
success: true,
|
success: true,
|
||||||
@@ -151,6 +152,6 @@ export const reconnectToSql = async () => {
|
|||||||
);
|
);
|
||||||
|
|
||||||
reconnecting = false;
|
reconnecting = false;
|
||||||
// exit alert someone here
|
// TODO: exit alert someone here
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -58,7 +58,7 @@ export const prodQuery = async (queryToRun: string, name: string) => {
|
|||||||
return {
|
return {
|
||||||
success: true,
|
success: true,
|
||||||
message: `Query results for: ${name}`,
|
message: `Query results for: ${name}`,
|
||||||
data: result.recordset,
|
data: result.recordset ?? [],
|
||||||
};
|
};
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
const err = error as SqlError;
|
const err = error as SqlError;
|
||||||
29
backend/prodSql/prodSqlQuerySelector.utils.ts
Normal file
29
backend/prodSql/prodSqlQuerySelector.utils.ts
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
import { readFileSync } from "node:fs";
|
||||||
|
|
||||||
|
export type SqlQuery = {
|
||||||
|
query: string;
|
||||||
|
success: boolean;
|
||||||
|
message: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const sqlQuerySelector = (name: string) => {
|
||||||
|
try {
|
||||||
|
const queryFile = readFileSync(
|
||||||
|
new URL(`../prodSql/queries/${name}.sql`, import.meta.url),
|
||||||
|
"utf8",
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
message: `Query for: ${name}`,
|
||||||
|
query: queryFile,
|
||||||
|
};
|
||||||
|
} catch (e) {
|
||||||
|
console.error(e);
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
message:
|
||||||
|
"Error getting the query file, please make sure you have the correct name.",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
208
backend/prodSql/queries/activeArticles.sql
Normal file
208
backend/prodSql/queries/activeArticles.sql
Normal file
@@ -0,0 +1,208 @@
|
|||||||
|
use AlplaPROD_test1
|
||||||
|
|
||||||
|
SELECT V_Artikel.IdArtikelvarianten,
|
||||||
|
V_Artikel.Bezeichnung,
|
||||||
|
V_Artikel.ArtikelvariantenTypBez,
|
||||||
|
V_Artikel.PreisEinheitBez,
|
||||||
|
case when sales.price is null then 0 else sales.price end as salesPrice,
|
||||||
|
TypeOfMaterial=CASE
|
||||||
|
WHEN
|
||||||
|
V_Artikel.ArtikelvariantenTypBez LIKE'%Additive'
|
||||||
|
Then 'AD'
|
||||||
|
when V_Artikel.ArtikelvariantenTypBez Like '%Masterbatch'
|
||||||
|
THEN 'MB'
|
||||||
|
WHEN V_Artikel.ArtikelvariantenTypBez ='Pallet' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Top' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Bags' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Bag' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Stretch Wrap' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Stretch Film' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Banding Materials' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Carton' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Re-Shipper Box' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Label' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Pallet Label' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Carton Label' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Liner' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Dose Cup' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Metal Cage' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez ='Spout' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Slip Sheet' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Palet' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'LID' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez= 'Metal' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez= 'Corner post' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez= 'Bottle Label' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Paper label' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Banding' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Glue' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Top Frame' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'IML Label' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Purch EBM Bottle' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Purchased Spout' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Gaylord' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Misc. Packaging' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Sleeve' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Plastic Bag' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Purch Spout' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Seal' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Tape' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Box' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Label IML' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Pallet Runner'
|
||||||
|
THEN 'PKG'
|
||||||
|
WHEN V_Artikel.ArtikelvariantenTypBez='HD-PE' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez='HD-PE PCR' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez='HD-PP' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez= 'PP' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez LIKE '%PCR' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez= 'LDPE' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez= 'PP' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez= 'HDPE' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez= 'PET' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez= 'PET-P' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez= 'PET-G'
|
||||||
|
THEN 'MM'
|
||||||
|
WHEN
|
||||||
|
V_Artikel.ArtikelvariantenTypBez='HDPE-Waste' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez='$Waste Container' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez='Mixed-Waste' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez LIKE'%-Waste%'
|
||||||
|
THEN 'Waste'
|
||||||
|
WHEN
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Bottle' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'SBM Bottle' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'EBM Bottle' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'ISBM Bottle' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Decorated Bottle'
|
||||||
|
THEN 'Bottle'
|
||||||
|
WHEN V_Artikel.ArtikelvariantenTypBez = 'Preform'
|
||||||
|
Then 'Preform'
|
||||||
|
When
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Purchased Preform' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Purchased Caps' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Purchased_preform'
|
||||||
|
THEN 'Purchased_preform'
|
||||||
|
When
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Closures' or
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Cap'
|
||||||
|
THEN 'Caps'
|
||||||
|
When
|
||||||
|
V_Artikel.ArtikelvariantenTypBez = 'Dummy'
|
||||||
|
THEN 'Not used'
|
||||||
|
ELSE 'Item not defined' END
|
||||||
|
,V_Artikel.IdArtikelvariantenTyp,
|
||||||
|
Round(V_Artikel.ArtikelGewicht, 3) as Article_Weight,
|
||||||
|
IdAdresse,
|
||||||
|
AdressBez,
|
||||||
|
AdressTypBez,
|
||||||
|
ProdBereichBez,
|
||||||
|
FG=case when
|
||||||
|
V_Artikel.ProdBereichBez = 'SBM' or
|
||||||
|
V_Artikel.ProdBereichBez = 'IM-Caps' or
|
||||||
|
V_Artikel.ProdBereichBez = 'IM-PET' or
|
||||||
|
V_Artikel.ProdBereichBez = 'PRINT OFFICE' or
|
||||||
|
V_Artikel.ProdBereichBez = 'EBM' or
|
||||||
|
V_Artikel.ProdBereichBez = 'ISBM' or
|
||||||
|
V_Artikel.ProdBereichBez = 'IM-Finishing'
|
||||||
|
Then 'FG'
|
||||||
|
Else 'not Defined Profit Center'
|
||||||
|
end,
|
||||||
|
V_Artikel.Umlaeufe as num_of_cycles,
|
||||||
|
V_FibuKonten_BASIS.FibuKontoNr as CostsCenterId,
|
||||||
|
V_FibuKonten_BASIS.Bezeichnung as CostCenterDescription,
|
||||||
|
sales.[KdArtNr] as CustomerArticleNumber,
|
||||||
|
sales.[KdArtBez] as CustomerArticleDescription,
|
||||||
|
round(V_Artikel.Zyklus, 2) as CycleTime,
|
||||||
|
Sypronummer as salesAgreement,
|
||||||
|
V_Artikel.ProdArtikelBez as ProductFamily
|
||||||
|
--,REPLACE(pur.UOM,'UOM:','')
|
||||||
|
,Case when LEFT(
|
||||||
|
LTRIM(REPLACE(pur.UOM,'UOM:','')),
|
||||||
|
CHARINDEX(' ', LTRIM(REPLACE(REPLACE(pur.UOM,'UOM:',''), CHAR(13)+CHAR(10), ' ')) + ' ') - 1
|
||||||
|
) is null then '1' else LEFT(
|
||||||
|
LTRIM(REPLACE(pur.UOM,'UOM:','')),
|
||||||
|
CHARINDEX(' ', LTRIM(REPLACE(REPLACE(pur.UOM,'UOM:',''), CHAR(13)+CHAR(10), ' ')) + ' ') - 1
|
||||||
|
) end AS UOM
|
||||||
|
|
||||||
|
--,*
|
||||||
|
FROM dbo.V_Artikel (nolock)
|
||||||
|
|
||||||
|
join
|
||||||
|
dbo.V_Artikelvarianten (nolock) on dbo.V_Artikel.IdArtikelvarianten =
|
||||||
|
dbo.V_Artikelvarianten.IdArtikelvarianten
|
||||||
|
|
||||||
|
join
|
||||||
|
dbo.V_FibuKonten_BASIS (nolock) on dbo.V_Artikelvarianten.IdFibuKonto =
|
||||||
|
dbo.V_FibuKonten_BASIS.IdFibuKonto
|
||||||
|
|
||||||
|
|
||||||
|
-- adding in the sales price
|
||||||
|
left join
|
||||||
|
(select * from
|
||||||
|
(select
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY IdArtikelvarianten ORDER BY GueltigabDatum DESC) AS RN,
|
||||||
|
IdArtikelvarianten as av
|
||||||
|
,GueltigabDatum as validDate
|
||||||
|
,VKPreis as price
|
||||||
|
,[KdArtNr]
|
||||||
|
,[KdArtBez]
|
||||||
|
--,*
|
||||||
|
from dbo.T_HistoryVK (nolock)
|
||||||
|
where
|
||||||
|
--GueltigabDatum > getDate() - 120
|
||||||
|
--and
|
||||||
|
Aktiv = 1
|
||||||
|
and StandardKunde = 1 -- default address
|
||||||
|
) a
|
||||||
|
where RN = 1) as sales
|
||||||
|
on dbo.V_Artikel.IdArtikelvarianten = sales.av
|
||||||
|
|
||||||
|
/* adding the purchase price info */
|
||||||
|
left join
|
||||||
|
(select * from
|
||||||
|
(select
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY IdArtikelvarianten ORDER BY GueltigabDatum DESC) AS RN,
|
||||||
|
IdArtikelvarianten as av
|
||||||
|
,GueltigabDatum as validDate
|
||||||
|
,EKPreis as price
|
||||||
|
,LiefArtNr as supplierNr
|
||||||
|
--,CASE
|
||||||
|
-- WHEN Bemerkung IS NOT NULL AND Bemerkung LIKE '%UOM:%'
|
||||||
|
-- THEN
|
||||||
|
-- -- incase there is something funny going on in the remark well jsut check for new lines and what not
|
||||||
|
-- LEFT(
|
||||||
|
-- REPLACE(REPLACE(Bemerkung, CHAR(13)+CHAR(10), ' '), CHAR(10), ' '),
|
||||||
|
-- CASE
|
||||||
|
-- WHEN CHARINDEX(' ', REPLACE(REPLACE(Bemerkung, CHAR(13)+CHAR(10), ' '), CHAR(10), ' ')) > 0
|
||||||
|
-- THEN CHARINDEX(' ', REPLACE(REPLACE(Bemerkung, CHAR(13)+CHAR(10), ' '), CHAR(10), ' ')) - 1
|
||||||
|
-- ELSE LEN(Bemerkung)
|
||||||
|
-- END
|
||||||
|
-- )
|
||||||
|
-- ELSE 'UOM:1'
|
||||||
|
-- END AS UOM
|
||||||
|
,CASE
|
||||||
|
WHEN Bemerkung IS NOT NULL AND Bemerkung LIKE '%UOM:%'
|
||||||
|
THEN
|
||||||
|
LTRIM(
|
||||||
|
SUBSTRING(
|
||||||
|
Bemerkung,
|
||||||
|
CHARINDEX('UOM:', UPPER(Bemerkung)) + LEN('UOM:'),
|
||||||
|
LEN(Bemerkung)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
ELSE
|
||||||
|
'UOM:1'
|
||||||
|
END AS UOM
|
||||||
|
,Bemerkung
|
||||||
|
--,*
|
||||||
|
from dbo.T_HistoryEK (nolock)
|
||||||
|
where
|
||||||
|
StandardLieferant = 1 -- default address
|
||||||
|
) a
|
||||||
|
where RN = 1) as pur
|
||||||
|
on dbo.V_Artikel.IdArtikelvarianten = pur.av
|
||||||
|
|
||||||
|
where V_Artikel.aktiv = 1 --and dbo.V_Artikel.IdArtikelvarianten = 1445
|
||||||
|
|
||||||
|
order by V_Artikel.IdArtikelvarianten /*, TypeOfMaterial */
|
||||||
74
backend/prodSql/queries/deliveryByDateRange.sql
Normal file
74
backend/prodSql/queries/deliveryByDateRange.sql
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
use [test1_AlplaPROD2.0_Read]
|
||||||
|
|
||||||
|
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
|
||||||
|
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
|
||||||
|
SELECT
|
||||||
|
r.[ArticleHumanReadableId]
|
||||||
|
,[ReleaseNumber]
|
||||||
|
,h.CustomerOrderNumber
|
||||||
|
,x.CustomerLineItemNumber
|
||||||
|
,[CustomerReleaseNumber]
|
||||||
|
,[ReleaseState]
|
||||||
|
,[DeliveryState]
|
||||||
|
,ea.JournalNummer as BOL_Number
|
||||||
|
,[ReleaseConfirmationState]
|
||||||
|
,[PlanningState]
|
||||||
|
--,format(r.[OrderDate], 'yyyy-MM-dd HH:mm') as OrderDate
|
||||||
|
,r.[OrderDate]
|
||||||
|
--,FORMAT(r.[DeliveryDate], 'yyyy-MM-dd HH:mm') as DeliveryDate
|
||||||
|
,r.[DeliveryDate]
|
||||||
|
--,FORMAT(r.[LoadingDate], 'yyyy-MM-dd HH:mm') as LoadingDate
|
||||||
|
,r.[LoadingDate]
|
||||||
|
,[Quantity]
|
||||||
|
,[DeliveredQuantity]
|
||||||
|
,r.[AdditionalInformation1]
|
||||||
|
,r.[AdditionalInformation2]
|
||||||
|
,[TradeUnits]
|
||||||
|
,[LoadingUnits]
|
||||||
|
,[Trucks]
|
||||||
|
,[LoadingToleranceType]
|
||||||
|
,[SalesPrice]
|
||||||
|
,[Currency]
|
||||||
|
,[QuantityUnit]
|
||||||
|
,[SalesPriceRemark]
|
||||||
|
,r.[Remark]
|
||||||
|
,[Irradiated]
|
||||||
|
,r.[CreatedByEdi]
|
||||||
|
,[DeliveryAddressHumanReadableId]
|
||||||
|
,DeliveryAddressDescription
|
||||||
|
,[CustomerArtNo]
|
||||||
|
,[TotalPrice]
|
||||||
|
,r.[ArticleAlias]
|
||||||
|
|
||||||
|
FROM [order].[Release] (nolock) as r
|
||||||
|
|
||||||
|
left join
|
||||||
|
[order].LineItem as x on
|
||||||
|
|
||||||
|
r.LineItemId = x.id
|
||||||
|
|
||||||
|
left join
|
||||||
|
[order].Header as h on
|
||||||
|
x.HeaderId = h.id
|
||||||
|
|
||||||
|
--bol stuff
|
||||||
|
left join
|
||||||
|
AlplaPROD_test1.dbo.V_LadePlanungenLadeAuftragAbruf (nolock) as zz
|
||||||
|
on zz.AbrufIdAuftragsAbruf = r.ReleaseNumber
|
||||||
|
|
||||||
|
left join
|
||||||
|
(select * from (SELECT
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY IdJournal ORDER BY add_date DESC) AS RowNum
|
||||||
|
,*
|
||||||
|
FROM [AlplaPROD_test1].[dbo].[T_Lieferungen] (nolock)) x
|
||||||
|
|
||||||
|
where RowNum = 1) as ea on
|
||||||
|
zz.IdLieferschein = ea.IdJournal
|
||||||
|
|
||||||
|
where
|
||||||
|
--r.ArticleHumanReadableId in ([articles])
|
||||||
|
--r.ReleaseNumber = 1452
|
||||||
|
|
||||||
|
r.DeliveryDate between @StartDate AND @EndDate
|
||||||
|
and DeliveredQuantity > 0
|
||||||
|
--and Journalnummer = 169386
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
export const prodSqlServerStats = `
|
|
||||||
DECLARE @UptimeSeconds INT;
|
DECLARE @UptimeSeconds INT;
|
||||||
DECLARE @StartTime DATETIME;
|
DECLARE @StartTime DATETIME;
|
||||||
|
|
||||||
@@ -13,4 +13,4 @@ SELECT
|
|||||||
(@UptimeSeconds % 86400) / 3600 AS [Hours],
|
(@UptimeSeconds % 86400) / 3600 AS [Hours],
|
||||||
(@UptimeSeconds % 3600) / 60 AS [Minutes],
|
(@UptimeSeconds % 3600) / 60 AS [Minutes],
|
||||||
(@UptimeSeconds % 60) AS [Seconds];
|
(@UptimeSeconds % 60) AS [Seconds];
|
||||||
`;
|
|
||||||
72
backend/prodSql/queries/releaseChecks.sql
Normal file
72
backend/prodSql/queries/releaseChecks.sql
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
SELECT
|
||||||
|
[Id]
|
||||||
|
,[ReleaseNumber]
|
||||||
|
,[CustomerReleaseNumber]
|
||||||
|
,[ReleaseState]
|
||||||
|
,[LineItemId]
|
||||||
|
,[BlanketOrderId]
|
||||||
|
,[DeliveryState]
|
||||||
|
,[ReleaseConfirmationState]
|
||||||
|
,[PlanningState]
|
||||||
|
,[OrderDate]
|
||||||
|
,cast([DeliveryDate] as datetime2) as DeliveryDate
|
||||||
|
,[LoadingDate]
|
||||||
|
,[Quantity]
|
||||||
|
,[DeliveredQuantity]
|
||||||
|
,[DeliveredQuantityTradeUnits]
|
||||||
|
,[DeliveredQuantityLoadingUnits]
|
||||||
|
,[PackagingId]
|
||||||
|
,[PackagingHumanReadableId]
|
||||||
|
,[PackagingDescription]
|
||||||
|
,[MainMaterialId]
|
||||||
|
,[MainMaterialHumanReadableId]
|
||||||
|
,[MainMaterialDescription]
|
||||||
|
,[AdditionalInformation1]
|
||||||
|
,[AdditionalInformation2]
|
||||||
|
,[D365SupplierLot]
|
||||||
|
,[TradeUnits]
|
||||||
|
,[LoadingUnits]
|
||||||
|
,[Trucks]
|
||||||
|
,[LoadingToleranceType]
|
||||||
|
,[UnderdeliveryDeviation]
|
||||||
|
,[OverdeliveryDeviation]
|
||||||
|
,[ArticleAccountRequirements_ArticleExact]
|
||||||
|
,[ArticleAccountRequirements_CustomerExact]
|
||||||
|
,[ArticleAccountRequirements_PackagingExact]
|
||||||
|
,[ArticleAccountRequirements_MainMaterialExact]
|
||||||
|
,[PriceLogicType]
|
||||||
|
,[AllowProductionLotMixing]
|
||||||
|
,[EnforceStrictPicking]
|
||||||
|
,[SalesPrice]
|
||||||
|
,[Currency]
|
||||||
|
,[QuantityUnit]
|
||||||
|
,[SalesPriceRemark]
|
||||||
|
,[DeliveryConditionId]
|
||||||
|
,[DeliveryConditionHumanReadableId]
|
||||||
|
,[DeliveryConditionDescription]
|
||||||
|
,[PaymentTermsId]
|
||||||
|
,[PaymentTermsHumanReadableId]
|
||||||
|
,[PaymentTermsDescription]
|
||||||
|
,[Remark]
|
||||||
|
,[DeliveryAddressId]
|
||||||
|
,[DeliveryAddressHumanReadableId]
|
||||||
|
,[DeliveryAddressDescription]
|
||||||
|
,[DeliveryStreetName]
|
||||||
|
,[DeliveryAddressZip]
|
||||||
|
,[DeliveryCity]
|
||||||
|
,[DeliveryCountry]
|
||||||
|
,[ReleaseDiscount]
|
||||||
|
,[CustomerArtNo]
|
||||||
|
,[LineItemHumanReadableId]
|
||||||
|
,[LineItemArticle]
|
||||||
|
,[LineItemArticleWeight]
|
||||||
|
,[LineItemQuantityType]
|
||||||
|
,[TotalPrice]
|
||||||
|
,[Add_User]
|
||||||
|
,[Add_Date]
|
||||||
|
,[Upd_User]
|
||||||
|
,cast([Upd_Date] as dateTime) as Upd_Date
|
||||||
|
,[VatRate]
|
||||||
|
,[ArticleAlias]
|
||||||
|
FROM [test1_AlplaPROD2.0_Reporting].[reporting_order].[Release] (nolock)
|
||||||
|
where format([Upd_Date], 'yyyy-MM-dd HH:mm:ss') > [dateCheck]
|
||||||
188
backend/rfid/daytonConfig copy.json
Normal file
188
backend/rfid/daytonConfig copy.json
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
{
|
||||||
|
"GPIO-LED": {
|
||||||
|
"GPODefaults": {
|
||||||
|
"1": "HIGH",
|
||||||
|
"2": "HIGH",
|
||||||
|
"3": "HIGH",
|
||||||
|
"4": "HIGH"
|
||||||
|
},
|
||||||
|
"LEDDefaults": {
|
||||||
|
"3": "GREEN"
|
||||||
|
},
|
||||||
|
"TAG_READ": [
|
||||||
|
{
|
||||||
|
"pin": 1,
|
||||||
|
"state": "HIGH",
|
||||||
|
"type": "GPO"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"READER-GATEWAY": {
|
||||||
|
"batching": [
|
||||||
|
{
|
||||||
|
"maxPayloadSizePerReport": 256000,
|
||||||
|
"reportingInterval": 2000
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"endpointConfig": {
|
||||||
|
"data": {
|
||||||
|
"event": {
|
||||||
|
"connections": [
|
||||||
|
{
|
||||||
|
"additionalOptions": {
|
||||||
|
"batching": {
|
||||||
|
"maxPayloadSizePerReport": 256000,
|
||||||
|
"reportingInterval": 2000
|
||||||
|
},
|
||||||
|
"retention": {
|
||||||
|
"maxEventRetentionTimeInMin": 500,
|
||||||
|
"maxNumEvents": 150000,
|
||||||
|
"throttle": 100
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "",
|
||||||
|
"name": "LST",
|
||||||
|
"options": {
|
||||||
|
"URL": "https://usday1prod.alpla.net/lst/old/api/rfid/taginfo/line3.4",
|
||||||
|
"security": {
|
||||||
|
"CACertificateFileLocation": "",
|
||||||
|
"authenticationOptions": {
|
||||||
|
"privateKeyFileLocation": "/readerconfig/ssl/server.key",
|
||||||
|
"publicKeyFileLocation": "/readerconfig/ssl/server.crt"
|
||||||
|
},
|
||||||
|
"authenticationType": "NONE",
|
||||||
|
"verifyHost": false,
|
||||||
|
"verifyPeer": false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "httpPost"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"managementEventConfig": {
|
||||||
|
"errors": {
|
||||||
|
"antenna": false,
|
||||||
|
"cpu": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 90
|
||||||
|
},
|
||||||
|
"database": true,
|
||||||
|
"flash": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 90
|
||||||
|
},
|
||||||
|
"ntp": true,
|
||||||
|
"radio": true,
|
||||||
|
"radio_control": true,
|
||||||
|
"ram": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 90
|
||||||
|
},
|
||||||
|
"reader_gateway": true,
|
||||||
|
"userApp": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 120
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"gpiEvents": true,
|
||||||
|
"gpoEvents": true,
|
||||||
|
"heartbeat": {
|
||||||
|
"fields": {
|
||||||
|
"radio_control": [
|
||||||
|
"ANTENNAS",
|
||||||
|
"RADIO_ACTIVITY",
|
||||||
|
"RADIO_CONNECTION",
|
||||||
|
"CPU",
|
||||||
|
"RAM",
|
||||||
|
"UPTIME",
|
||||||
|
"NUM_ERRORS",
|
||||||
|
"NUM_WARNINGS",
|
||||||
|
"NUM_TAG_READS",
|
||||||
|
"NUM_TAG_READS_PER_ANTENNA",
|
||||||
|
"NUM_DATA_MESSAGES_TXED",
|
||||||
|
"NUM_RADIO_PACKETS_RXED"
|
||||||
|
],
|
||||||
|
"reader_gateway": [
|
||||||
|
"NUM_DATA_MESSAGES_RXED",
|
||||||
|
"NUM_MANAGEMENT_EVENTS_TXED",
|
||||||
|
"NUM_DATA_MESSAGES_TXED",
|
||||||
|
"NUM_DATA_MESSAGES_RETAINED",
|
||||||
|
"NUM_DATA_MESSAGES_DROPPED",
|
||||||
|
"CPU",
|
||||||
|
"RAM",
|
||||||
|
"UPTIME",
|
||||||
|
"NUM_ERRORS",
|
||||||
|
"NUM_WARNINGS",
|
||||||
|
"INTERFACE_CONNECTION_STATUS",
|
||||||
|
"NOLOCKQ_DEPTH"
|
||||||
|
],
|
||||||
|
"system": [
|
||||||
|
"CPU",
|
||||||
|
"FLASH",
|
||||||
|
"NTP",
|
||||||
|
"RAM",
|
||||||
|
"SYSTEMTIME",
|
||||||
|
"TEMPERATURE",
|
||||||
|
"UPTIME",
|
||||||
|
"GPO",
|
||||||
|
"GPI",
|
||||||
|
"POWER_NEGOTIATION",
|
||||||
|
"POWER_SOURCE",
|
||||||
|
"MAC_ADDRESS",
|
||||||
|
"HOSTNAME"
|
||||||
|
],
|
||||||
|
"userDefined": null,
|
||||||
|
"userapps": [
|
||||||
|
"STATUS",
|
||||||
|
"CPU",
|
||||||
|
"RAM",
|
||||||
|
"UPTIME",
|
||||||
|
"NUM_DATA_MESSAGES_RXED",
|
||||||
|
"NUM_DATA_MESSAGES_TXED",
|
||||||
|
"INCOMING_DATA_BUFFER_PERCENTAGE_REMAINING",
|
||||||
|
"OUTGOING_DATA_BUFFER_PERCENTAGE_REMAINING"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"interval": 60
|
||||||
|
},
|
||||||
|
"userappEvents": true,
|
||||||
|
"warnings": {
|
||||||
|
"cpu": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 80
|
||||||
|
},
|
||||||
|
"database": true,
|
||||||
|
"flash": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 80
|
||||||
|
},
|
||||||
|
"ntp": true,
|
||||||
|
"radio_api": true,
|
||||||
|
"radio_control": true,
|
||||||
|
"ram": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 80
|
||||||
|
},
|
||||||
|
"reader_gateway": true,
|
||||||
|
"temperature": {
|
||||||
|
"ambient": 75,
|
||||||
|
"pa": 105
|
||||||
|
},
|
||||||
|
"userApp": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 60
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"retention": [
|
||||||
|
{
|
||||||
|
"maxEventRetentionTimeInMin": 500,
|
||||||
|
"maxNumEvents": 150000,
|
||||||
|
"throttle": 100
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"xml": "<?xml version='1.0'?>\n<Motorola xmlns:Falcon='http://www.motorola.com/RFID/Readers/Config/Falcon' xmlns='http://www.motorola.com/RFID/Readers/Config/Falcon'>\n<Config>\n<AppVersion major='3' minor='28' build='1' maintenance='0'/>\n<CommConfig EnabledStacks='IPV4' DisableRAPktProcessing='1' EnableDHCPv6='1' IPv6StaticIPAddr='fe80::1' IPv6SubnetMask='64' IPv6StaticGateway='::' IPv6DNSIP='fe80::20' DHCP='1' IPAddr='10.44.14.39' Mask='255.255.255.0' Gateway='10.44.14.252' DNS='10.44.9.250' DomainSearch='example.com' HttpRunning='2' TelnetActive='2' FtpActive='2' usbMode='0' WatchdogEnabled='1' AvahiEnabled='1' NetBIOSEnabled='0' RDMPAgentEnabled='1' SerialConTimeout='0' SNTP='0.0.0.0' SNTPHostName='pool.ntp.org' sntpHostDisplayMode='0' llrpClientMode='0' llrpSecureMode='0' llrpSecureModeValidatePeer='0' llrpPort='5084' llrpHostIP='192.168.127.2' allowllrpConnOverride='0' shouldReconnect='1'/>\n<Bluetooth discoverable='0' pairable='0' PincodeEnabled='0' passkey='165CB22DA5BE7BBEFB77709DD0A94B03FB77709DD0A94B03FB77709DD0A94B03FB77709DD0A94B03FB77709DD0A94B03FB77709DD0A94B03FB77709DD0A94B03' startIP='192.168.0.2' endIP='192.168.0.3'/>\n<WirelessConfig essid='' autoconnect='0'/>\n<RegionConfig RFCountry='United States/Canada' RFRegulatory='US FCC 15' RFScanMode='0' LBTEnable='0' ChannelData='FFFFFFFFFFFFFFFF'/>\n<SnmpConfig snmpVersion='1' heartbeat='1'/>\n<SyslogConfig RemoteIp='0.0.0.0' RemotePort='514' LogMinSeverity='7' ApplyFilter='0' MinimumSeverity='7' ProcessFilter='rmserver.elf,llrpserver.elf,snmpextagent.elf,RDMPAgent'/>\n<UserList>\n<User name='admin' PSWD='$6$weLpDwlv$utr0AwgPIae2O4Gln4cQ2IJJblXye412Xqni0V.ahIFKUOCEDGjzZ4ttthhrw7rmmQYsCXKwA9znyqPkAT.IL/'/>\n<User name='rfidadm' PSWD='15491'/>\n</UserList>\n<IPReader name='FX96007AF832 FX9600 RFID Reader' desc='FX96007AF832 Advanced Reader' flags='0' MonoStatic='0' CheckAntenna='1' gpiDebounceTime='0' gpioMapping='0' idleModeTimeOut='0' diagMode='0' extDiagMode='0' contact='Zebra Technologies 
Corporation' PowerNegotiation='0' PowerNegotiationProtocol='0' allowGuestLogin='1' configureHostName='0'>\n<ReadPoint name='Read Point 1' flags='0' CableLossPerHundredFt='10' CableLength='10'/>\n<ReadPoint name='Read Point 2' flags='0' CableLossPerHundredFt='10' CableLength='10'/>\n<ReadPoint name='Read Point 3' flags='1' CableLossPerHundredFt='10' CableLength='10'/>\n<ReadPoint name='Read Point 4' flags='1' CableLossPerHundredFt='10' CableLength='10'/>\n</IPReader>\n<SerialPortConf Mode='0' Baudrate='115200' Databits='8' Parity='none' Stopbits='1' Flowcontrol='hardware' TagMetaData='0' InventoryControl='0' IsAutostart='0'/>\n<FXConnectConfig FXConnectMode='0' TagMetaData='0' InventoryControl='None' HeartBeatPeriod='0' IsAutostart='0' PreFilterMode='0' PreFilters='None'/>\n<ProfinetConfig virtualDAP='1'/>\n<NodeJSPortConf Portnumber='8001'/>\n</Config>\n<MOTOROLA_LLRP_CONFIG><LLRP_READER_CONFIG />\n</MOTOROLA_LLRP_CONFIG>\n<IOT_CONNECT_CONFIG><OPERATING_MODE />\n</IOT_CONNECT_CONFIG>\n<RadioProfileData><RadioRegisterData Address='0' Data='00'/>\n</RadioProfileData>\n<CustomProfileData ForceEAPMode='0' FIPS_MODE_ENABLED='0' MaxNumberOfTagsBuffered='512'/>\n</Motorola >\n"
|
||||||
|
}
|
||||||
206
backend/rfid/daytonConfig.json
Normal file
206
backend/rfid/daytonConfig.json
Normal file
@@ -0,0 +1,206 @@
|
|||||||
|
{
|
||||||
|
"GPIO-LED": {
|
||||||
|
"GPODefaults": {
|
||||||
|
"1": "HIGH",
|
||||||
|
"2": "HIGH",
|
||||||
|
"3": "HIGH",
|
||||||
|
"4": "HIGH"
|
||||||
|
},
|
||||||
|
"LEDDefaults": {
|
||||||
|
"3": "GREEN"
|
||||||
|
},
|
||||||
|
"TAG_READ": [
|
||||||
|
{
|
||||||
|
"pin": 1,
|
||||||
|
"state": "HIGH",
|
||||||
|
"type": "GPO"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"READER-GATEWAY": {
|
||||||
|
"batching": [
|
||||||
|
{
|
||||||
|
"maxPayloadSizePerReport": 256000,
|
||||||
|
"reportingInterval": 2000
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"endpointConfig": {
|
||||||
|
"data": {
|
||||||
|
"event": {
|
||||||
|
"connections": [
|
||||||
|
{
|
||||||
|
"additionalOptions": {
|
||||||
|
"retention": {
|
||||||
|
"maxEventRetentionTimeInMin": 500,
|
||||||
|
"maxNumEvents": 150000,
|
||||||
|
"throttle": 100
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "",
|
||||||
|
"name": "lst",
|
||||||
|
"options": {
|
||||||
|
"URL": "http://usday1vms006:3100/api/rfid/taginfo/wrapper1",
|
||||||
|
"security": {
|
||||||
|
"CACertificateFileLocation": "",
|
||||||
|
"authenticationOptions": {},
|
||||||
|
"authenticationType": "NONE",
|
||||||
|
"verifyHost": false,
|
||||||
|
"verifyPeer": false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "httpPost"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"additionalOptions": {
|
||||||
|
"retention": {
|
||||||
|
"maxEventRetentionTimeInMin": 500,
|
||||||
|
"maxNumEvents": 150000,
|
||||||
|
"throttle": 100
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "",
|
||||||
|
"name": "mgt",
|
||||||
|
"options": {
|
||||||
|
"URL": "http://usday1vms006:3100/api/rfid/mgtevents/wrapper1",
|
||||||
|
"security": {
|
||||||
|
"CACertificateFileLocation": "",
|
||||||
|
"authenticationOptions": {},
|
||||||
|
"authenticationType": "NONE",
|
||||||
|
"verifyHost": false,
|
||||||
|
"verifyPeer": false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "httpPost"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"interfaces": {
|
||||||
|
"tagDataInterface1": "lst",
|
||||||
|
"managementEventsInterface": "mgt"
|
||||||
|
},
|
||||||
|
"managementEventConfig": {
|
||||||
|
"errors": {
|
||||||
|
"antenna": false,
|
||||||
|
"cpu": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 90
|
||||||
|
},
|
||||||
|
"database": true,
|
||||||
|
"flash": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 90
|
||||||
|
},
|
||||||
|
"ntp": true,
|
||||||
|
"radio": true,
|
||||||
|
"radio_control": true,
|
||||||
|
"ram": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 90
|
||||||
|
},
|
||||||
|
"reader_gateway": true,
|
||||||
|
"userApp": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 120
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"gpiEvents": true,
|
||||||
|
"gpoEvents": true,
|
||||||
|
"heartbeat": {
|
||||||
|
"fields": {
|
||||||
|
"radio_control": [
|
||||||
|
"ANTENNAS",
|
||||||
|
"RADIO_ACTIVITY",
|
||||||
|
"RADIO_CONNECTION",
|
||||||
|
"CPU",
|
||||||
|
"RAM",
|
||||||
|
"UPTIME",
|
||||||
|
"NUM_ERRORS",
|
||||||
|
"NUM_WARNINGS",
|
||||||
|
"NUM_TAG_READS",
|
||||||
|
"NUM_TAG_READS_PER_ANTENNA",
|
||||||
|
"NUM_DATA_MESSAGES_TXED",
|
||||||
|
"NUM_RADIO_PACKETS_RXED"
|
||||||
|
],
|
||||||
|
"reader_gateway": [
|
||||||
|
"NUM_DATA_MESSAGES_RXED",
|
||||||
|
"NUM_MANAGEMENT_EVENTS_TXED",
|
||||||
|
"NUM_DATA_MESSAGES_TXED",
|
||||||
|
"NUM_DATA_MESSAGES_RETAINED",
|
||||||
|
"NUM_DATA_MESSAGES_DROPPED",
|
||||||
|
"CPU",
|
||||||
|
"RAM",
|
||||||
|
"UPTIME",
|
||||||
|
"NUM_ERRORS",
|
||||||
|
"NUM_WARNINGS",
|
||||||
|
"INTERFACE_CONNECTION_STATUS",
|
||||||
|
"NOLOCKQ_DEPTH"
|
||||||
|
],
|
||||||
|
"system": [
|
||||||
|
"CPU",
|
||||||
|
"FLASH",
|
||||||
|
"NTP",
|
||||||
|
"RAM",
|
||||||
|
"SYSTEMTIME",
|
||||||
|
"TEMPERATURE",
|
||||||
|
"UPTIME",
|
||||||
|
"GPO",
|
||||||
|
"GPI",
|
||||||
|
"POWER_NEGOTIATION",
|
||||||
|
"POWER_SOURCE",
|
||||||
|
"MAC_ADDRESS",
|
||||||
|
"HOSTNAME"
|
||||||
|
],
|
||||||
|
"userDefined": null,
|
||||||
|
"userapps": [
|
||||||
|
"STATUS",
|
||||||
|
"CPU",
|
||||||
|
"RAM",
|
||||||
|
"UPTIME",
|
||||||
|
"NUM_DATA_MESSAGES_RXED",
|
||||||
|
"NUM_DATA_MESSAGES_TXED",
|
||||||
|
"INCOMING_DATA_BUFFER_PERCENTAGE_REMAINING",
|
||||||
|
"OUTGOING_DATA_BUFFER_PERCENTAGE_REMAINING"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"interval": 60
|
||||||
|
},
|
||||||
|
"userappEvents": true,
|
||||||
|
"warnings": {
|
||||||
|
"cpu": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 80
|
||||||
|
},
|
||||||
|
"database": true,
|
||||||
|
"flash": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 80
|
||||||
|
},
|
||||||
|
"ntp": true,
|
||||||
|
"radio_api": true,
|
||||||
|
"radio_control": true,
|
||||||
|
"ram": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 80
|
||||||
|
},
|
||||||
|
"reader_gateway": true,
|
||||||
|
"temperature": {
|
||||||
|
"ambient": 75,
|
||||||
|
"pa": 105
|
||||||
|
},
|
||||||
|
"userApp": {
|
||||||
|
"reportIntervalInSec": 1800,
|
||||||
|
"threshold": 60
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"retention": [
|
||||||
|
{
|
||||||
|
"maxEventRetentionTimeInMin": 500,
|
||||||
|
"maxNumEvents": 150000,
|
||||||
|
"throttle": 100
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,25 +1,25 @@
|
|||||||
import type { Express } from "express";
|
import type { Express } from "express";
|
||||||
|
|
||||||
import { setupAuthRoutes } from "./auth/auth.routes.js";
|
import { setupAuthRoutes } from "./auth/auth.routes.js";
|
||||||
// import the routes and route setups
|
// import the routes and route setups
|
||||||
import { setupApiDocsRoutes } from "./configs/scaler.config.js";
|
import { setupApiDocsRoutes } from "./configs/scaler.config.js";
|
||||||
import { setupDatamartRoutes } from "./datamart/datamart.routes.js";
|
import { setupDatamartRoutes } from "./datamart/datamart.routes.js";
|
||||||
|
import { setupNotificationRoutes } from "./notification/notification.routes.js";
|
||||||
|
import { setupOCPRoutes } from "./ocp/ocp.routes.js";
|
||||||
|
import { setupOpendockRoutes } from "./opendock/opendock.routes.js";
|
||||||
import { setupProdSqlRoutes } from "./prodSql/prodSql.routes.js";
|
import { setupProdSqlRoutes } from "./prodSql/prodSql.routes.js";
|
||||||
import stats from "./system/stats.route.js";
|
import { setupSystemRoutes } from "./system/system.routes.js";
|
||||||
|
import { setupUtilsRoutes } from "./utils/utils.routes.js";
|
||||||
|
|
||||||
export const setupRoutes = (baseUrl: string, app: Express) => {
|
export const setupRoutes = (baseUrl: string, app: Express) => {
|
||||||
app.use(`${baseUrl}/api/stats`, stats);
|
|
||||||
//routes that are on by default
|
//routes that are on by default
|
||||||
|
setupSystemRoutes(baseUrl, app);
|
||||||
setupApiDocsRoutes(baseUrl, app);
|
setupApiDocsRoutes(baseUrl, app);
|
||||||
setupProdSqlRoutes(baseUrl, app);
|
setupProdSqlRoutes(baseUrl, app);
|
||||||
setupDatamartRoutes(baseUrl, app);
|
setupDatamartRoutes(baseUrl, app);
|
||||||
setupAuthRoutes(baseUrl, app);
|
setupAuthRoutes(baseUrl, app);
|
||||||
|
setupUtilsRoutes(baseUrl, app);
|
||||||
// routes that get activated if the module is set to activated.
|
setupOpendockRoutes(baseUrl, app);
|
||||||
|
setupNotificationRoutes(baseUrl, app);
|
||||||
app.all("*foo", (_, res) => {
|
setupOCPRoutes(baseUrl, app);
|
||||||
res.status(400).json({
|
|
||||||
message:
|
|
||||||
"You have encountered a route that dose not exist, please check the url and try again",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
};
|
};
|
||||||
43
backend/scaler/cronerActiveJobs.spec.ts
Normal file
43
backend/scaler/cronerActiveJobs.spec.ts
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
import type { OpenAPIV3_1 } from "openapi-types";
|
||||||
|
|
||||||
|
export const cronerActiveJobs: OpenAPIV3_1.PathsObject = {
|
||||||
|
"/api/utils/croner": {
|
||||||
|
get: {
|
||||||
|
summary: "Cron jobs",
|
||||||
|
description: "Returns all jobs on the server.",
|
||||||
|
tags: ["Utils"],
|
||||||
|
responses: {
|
||||||
|
"200": {
|
||||||
|
description: "Jobs returned",
|
||||||
|
content: {
|
||||||
|
"application/json": {
|
||||||
|
schema: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
status: {
|
||||||
|
type: "boolean",
|
||||||
|
format: "boolean",
|
||||||
|
example: true,
|
||||||
|
},
|
||||||
|
uptime: {
|
||||||
|
type: "number",
|
||||||
|
format: "3454.34",
|
||||||
|
example: 3454.34,
|
||||||
|
},
|
||||||
|
memoryUsage: {
|
||||||
|
type: "string",
|
||||||
|
format: "Heap: 11.62 MB / RSS: 86.31 MB",
|
||||||
|
},
|
||||||
|
sqlServerStats: {
|
||||||
|
type: "number",
|
||||||
|
format: "442127",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
94
backend/scaler/cronerStatusChange.spec.ts
Normal file
94
backend/scaler/cronerStatusChange.spec.ts
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
import type { OpenAPIV3_1 } from "openapi-types";
|
||||||
|
|
||||||
|
export const cronerStatusChange: OpenAPIV3_1.PathsObject = {
|
||||||
|
"/api/utils/croner/{status}": {
|
||||||
|
patch: {
|
||||||
|
summary: "Pauses or Resume the Job",
|
||||||
|
description:
|
||||||
|
"When sending start or stop with job name it will resume or stop the job",
|
||||||
|
tags: ["Utils"],
|
||||||
|
|
||||||
|
parameters: [
|
||||||
|
{
|
||||||
|
name: "status",
|
||||||
|
in: "path",
|
||||||
|
required: true,
|
||||||
|
description: "Status change",
|
||||||
|
schema: {
|
||||||
|
type: "string",
|
||||||
|
},
|
||||||
|
example: "start",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "limit",
|
||||||
|
in: "query",
|
||||||
|
required: false, // 👈 optional
|
||||||
|
description: "Maximum number of records to return",
|
||||||
|
schema: {
|
||||||
|
type: "integer",
|
||||||
|
minimum: 1,
|
||||||
|
maximum: 100,
|
||||||
|
},
|
||||||
|
example: 10,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requestBody: {
|
||||||
|
required: true,
|
||||||
|
content: {
|
||||||
|
"application/json": {
|
||||||
|
schema: {
|
||||||
|
type: "object",
|
||||||
|
required: ["name"],
|
||||||
|
properties: {
|
||||||
|
name: {
|
||||||
|
type: "string",
|
||||||
|
example: "start",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
responses: {
|
||||||
|
"200": {
|
||||||
|
description: "Successful response",
|
||||||
|
content: {
|
||||||
|
"application/json": {
|
||||||
|
schema: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
success: { type: "boolean", example: true },
|
||||||
|
data: {
|
||||||
|
type: "object",
|
||||||
|
example: {
|
||||||
|
name: "exampleName",
|
||||||
|
value: "some value",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"400": {
|
||||||
|
description: "Bad request",
|
||||||
|
content: {
|
||||||
|
"application/json": {
|
||||||
|
schema: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
success: { type: "boolean", example: false },
|
||||||
|
message: {
|
||||||
|
type: "string",
|
||||||
|
example: "Invalid name parameter",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
@@ -31,6 +31,12 @@ export const datamartAddSpec: OpenAPIV3_1.PathsObject = {
|
|||||||
description:
|
description:
|
||||||
"Optional comma separated options string passed to the query",
|
"Optional comma separated options string passed to the query",
|
||||||
},
|
},
|
||||||
|
publicAccess: {
|
||||||
|
type: "boolean",
|
||||||
|
example: "true",
|
||||||
|
description:
|
||||||
|
"Will this query be accessible by the frontend's",
|
||||||
|
},
|
||||||
queryFile: {
|
queryFile: {
|
||||||
type: "string",
|
type: "string",
|
||||||
format: "binary",
|
format: "binary",
|
||||||
36
backend/scaler/opendockGetRelease.spec.ts
Normal file
36
backend/scaler/opendockGetRelease.spec.ts
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
import type { OpenAPIV3_1 } from "openapi-types";
|
||||||
|
|
||||||
|
export const openDockApt: OpenAPIV3_1.PathsObject = {
|
||||||
|
"/api/opendock": {
|
||||||
|
get: {
|
||||||
|
summary: "Open Dock apt",
|
||||||
|
description: "Returns the last 30 days of apt(s).",
|
||||||
|
tags: ["Open Dock"],
|
||||||
|
responses: {
|
||||||
|
"200": {
|
||||||
|
description: "Jobs returned",
|
||||||
|
content: {
|
||||||
|
"application/json": {
|
||||||
|
schema: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
success: {
|
||||||
|
type: "boolean",
|
||||||
|
format: "boolean",
|
||||||
|
example: true,
|
||||||
|
},
|
||||||
|
message: {
|
||||||
|
type: "string",
|
||||||
|
format: "string",
|
||||||
|
example:
|
||||||
|
"The first 5 Apt(s) that were created in the last 30 days",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
@@ -3,6 +3,7 @@ import type { OpenAPIV3_1 } from "openapi-types";
|
|||||||
export const prodRestartSpec: OpenAPIV3_1.PathsObject = {
|
export const prodRestartSpec: OpenAPIV3_1.PathsObject = {
|
||||||
"/api/system/prodSql/restart": {
|
"/api/system/prodSql/restart": {
|
||||||
post: {
|
post: {
|
||||||
|
//security: [],
|
||||||
summary: "Prod restart sql connection",
|
summary: "Prod restart sql connection",
|
||||||
description: "Attempts to restart the sql connection.",
|
description: "Attempts to restart the sql connection.",
|
||||||
tags: ["System"],
|
tags: ["System"],
|
||||||
@@ -1,20 +1,63 @@
|
|||||||
|
import { createServer } from "node:http";
|
||||||
import os from "node:os";
|
import os from "node:os";
|
||||||
import createApp from "./app.js";
|
import createApp from "./app.js";
|
||||||
import { createLogger } from "./src/logger/logger.controller.js";
|
import { db } from "./db/db.controller.js";
|
||||||
import { connectProdSql } from "./src/prodSql/prodSqlConnection.controller.js";
|
import { dbCleanup } from "./db/dbCleanup.controller.js";
|
||||||
|
import { type Setting, settings } from "./db/schema/settings.schema.js";
|
||||||
|
import { createLogger } from "./logger/logger.controller.js";
|
||||||
|
import { startNotifications } from "./notification/notification.controller.js";
|
||||||
|
import { createNotifications } from "./notification/notifications.master.js";
|
||||||
|
import { monitorReleaseChanges } from "./opendock/openDockRreleaseMonitor.utils.js";
|
||||||
|
import { opendockSocketMonitor } from "./opendock/opendockSocketMonitor.utils.js";
|
||||||
|
import { connectProdSql } from "./prodSql/prodSqlConnection.controller.js";
|
||||||
|
import { setupSocketIORoutes } from "./socket.io/serverSetup.js";
|
||||||
|
import { baseSettingValidationCheck } from "./system/settingsBase.controller.js";
|
||||||
|
import { createCronJob } from "./utils/croner.utils.js";
|
||||||
|
|
||||||
const port = Number(process.env.PORT) || 3000;
|
const port = Number(process.env.PORT) || 3000;
|
||||||
|
export let systemSettings: Setting[] = [];
|
||||||
const start = async () => {
|
const start = async () => {
|
||||||
const log = createLogger({ module: "system", subModule: "main start" });
|
|
||||||
|
|
||||||
connectProdSql();
|
|
||||||
|
|
||||||
const { app, baseUrl } = await createApp();
|
const { app, baseUrl } = await createApp();
|
||||||
|
|
||||||
app.listen(port, async () => {
|
const server = createServer(app);
|
||||||
|
|
||||||
|
setupSocketIORoutes(baseUrl, server);
|
||||||
|
|
||||||
|
const log = createLogger({ module: "system", subModule: "main start" });
|
||||||
|
|
||||||
|
// triggering long lived processes
|
||||||
|
connectProdSql();
|
||||||
|
|
||||||
|
// trigger startup processes these must run before anything else can run
|
||||||
|
await baseSettingValidationCheck();
|
||||||
|
systemSettings = await db.select().from(settings);
|
||||||
|
|
||||||
|
//when starting up long lived features the name must match the setting name.
|
||||||
|
// also we always want to have long lived processes inside a setting check.
|
||||||
|
setTimeout(() => {
|
||||||
|
if (systemSettings.filter((n) => n.name === "opendock_sync")[0]?.active) {
|
||||||
|
log.info({}, "Opendock is not active");
|
||||||
|
monitorReleaseChanges(); // this is od monitoring the db for all new releases
|
||||||
|
opendockSocketMonitor();
|
||||||
|
createCronJob("opendockAptCleanup", "0 30 5 * * *", () =>
|
||||||
|
dbCleanup("opendockApt", 90),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// these jobs below are system jobs and should run no matter what.
|
||||||
|
createCronJob("JobAuditLogCleanUp", "0 0 5 * * *", () =>
|
||||||
|
dbCleanup("jobs", 30),
|
||||||
|
);
|
||||||
|
createCronJob("logsCleanup", "0 15 5 * * *", () => dbCleanup("logs", 120));
|
||||||
|
|
||||||
|
// one shots only needed to run on server startups
|
||||||
|
createNotifications();
|
||||||
|
startNotifications();
|
||||||
|
}, 5 * 1000);
|
||||||
|
|
||||||
|
server.listen(port, async () => {
|
||||||
log.info(
|
log.info(
|
||||||
`Listening on http://${os.hostname()}:${port}${baseUrl}, logging in ${process.env.LOG_LEVEL}`,
|
`Listening on http://${os.hostname()}:${port}${baseUrl}, logging in ${process.env.LOG_LEVEL}, current ENV ${process.env.NODE_ENV ? process.env.NODE_ENV : "development"}`,
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|||||||
8
backend/socket.io/roomCache.socket.ts
Normal file
8
backend/socket.io/roomCache.socket.ts
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
import type { RoomId } from "./types.socket.js";
|
||||||
|
|
||||||
|
export const MAX_HISTORY = 50;
|
||||||
|
export const FLUSH_INTERVAL = 100; // 50ms change higher if needed
|
||||||
|
|
||||||
|
export const roomHistory = new Map<RoomId, unknown[]>();
|
||||||
|
export const roomBuffers = new Map<RoomId, any[]>();
|
||||||
|
export const roomFlushTimers = new Map<RoomId, NodeJS.Timeout>();
|
||||||
39
backend/socket.io/roomDefinitions.socket.ts
Normal file
39
backend/socket.io/roomDefinitions.socket.ts
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
import { desc } from "drizzle-orm";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { logs } from "../db/schema/logs.schema.js";
|
||||||
|
import type { RoomId } from "./types.socket.js";
|
||||||
|
|
||||||
|
type RoomDefinition<T = unknown> = {
|
||||||
|
seed: (limit: number) => Promise<T[]>;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const protectedRooms: any = {
|
||||||
|
logs: { requiresAuth: true, role: ["admin", "systemAdmin"] },
|
||||||
|
admin: { requiresAuth: true, role: ["admin", "systemAdmin"] },
|
||||||
|
};
|
||||||
|
|
||||||
|
export const roomDefinition: Record<RoomId, RoomDefinition> = {
|
||||||
|
logs: {
|
||||||
|
seed: async (limit) => {
|
||||||
|
try {
|
||||||
|
const rows = await db
|
||||||
|
.select()
|
||||||
|
.from(logs)
|
||||||
|
.orderBy(desc(logs.createdAt))
|
||||||
|
.limit(limit);
|
||||||
|
|
||||||
|
return rows; //.reverse();
|
||||||
|
} catch (e) {
|
||||||
|
console.error("Failed to seed logs:", e);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
labels: {
|
||||||
|
seed: async (limit) => {
|
||||||
|
console.info(limit);
|
||||||
|
return [];
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
27
backend/socket.io/roomEmitter.socket.ts
Normal file
27
backend/socket.io/roomEmitter.socket.ts
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
// the emitter setup
|
||||||
|
|
||||||
|
import type { RoomId } from "./types.socket.js";
|
||||||
|
|
||||||
|
let addDataToRoom: ((roomId: RoomId, payload: unknown[]) => void) | null = null;
|
||||||
|
|
||||||
|
export const registerEmitter = (
|
||||||
|
fn: (roomId: RoomId, payload: unknown[]) => void,
|
||||||
|
) => {
|
||||||
|
addDataToRoom = fn;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const emitToRoom = (roomId: RoomId, payload: unknown[]) => {
|
||||||
|
if (!addDataToRoom) {
|
||||||
|
console.error("Socket emitter not initialized");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
addDataToRoom(roomId, payload);
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
import { emitToRoom } from "../socket/socketEmitter.js";
|
||||||
|
// room name
|
||||||
|
// its payload
|
||||||
|
emitToRoom("logs", newLogRow);
|
||||||
|
*/
|
||||||
73
backend/socket.io/roomService.socket.ts
Normal file
73
backend/socket.io/roomService.socket.ts
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
import type { Server } from "socket.io";
|
||||||
|
import { createLogger } from "../logger/logger.controller.js";
|
||||||
|
import {
|
||||||
|
FLUSH_INTERVAL,
|
||||||
|
MAX_HISTORY,
|
||||||
|
roomBuffers,
|
||||||
|
roomFlushTimers,
|
||||||
|
roomHistory,
|
||||||
|
} from "./roomCache.socket.js";
|
||||||
|
import { roomDefinition } from "./roomDefinitions.socket.js";
|
||||||
|
import type { RoomId } from "./types.socket.js";
|
||||||
|
|
||||||
|
// get the db data if not exiting already
|
||||||
|
const log = createLogger({ module: "socket.io", subModule: "roomService" });
|
||||||
|
|
||||||
|
export const preseedRoom = async (roomId: RoomId) => {
|
||||||
|
if (roomHistory.has(roomId)) {
|
||||||
|
return roomHistory.get(roomId);
|
||||||
|
}
|
||||||
|
|
||||||
|
const roomDef = roomDefinition[roomId];
|
||||||
|
|
||||||
|
if (!roomDef) {
|
||||||
|
log.error({}, `Room ${roomId} is not defined`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const latestData = await roomDef.seed(MAX_HISTORY);
|
||||||
|
|
||||||
|
roomHistory.set(roomId, latestData);
|
||||||
|
|
||||||
|
return latestData;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const createRoomEmitter = (io: Server) => {
|
||||||
|
const addDataToRoom = <T>(roomId: RoomId, payload: T) => {
|
||||||
|
if (!roomHistory.has(roomId)) {
|
||||||
|
roomHistory.set(roomId, []);
|
||||||
|
}
|
||||||
|
|
||||||
|
const history = roomHistory.get(roomId)!;
|
||||||
|
history?.push(payload);
|
||||||
|
|
||||||
|
if (history?.length > MAX_HISTORY) {
|
||||||
|
history?.shift();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!roomBuffers.has(roomId)) {
|
||||||
|
roomBuffers.set(roomId, []);
|
||||||
|
}
|
||||||
|
|
||||||
|
roomBuffers.get(roomId)!.push(payload);
|
||||||
|
|
||||||
|
if (!roomFlushTimers.has(roomId)) {
|
||||||
|
const timer = setTimeout(() => {
|
||||||
|
const buffered = roomBuffers.get(roomId) || [];
|
||||||
|
|
||||||
|
if (buffered.length > 0) {
|
||||||
|
io.to(roomId).emit("room-update", {
|
||||||
|
roomId,
|
||||||
|
payloads: buffered, // ✅ array now
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
roomBuffers.set(roomId, []);
|
||||||
|
roomFlushTimers.delete(roomId);
|
||||||
|
}, FLUSH_INTERVAL);
|
||||||
|
|
||||||
|
roomFlushTimers.set(roomId, timer);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return { addDataToRoom };
|
||||||
|
};
|
||||||
140
backend/socket.io/serverSetup.ts
Normal file
140
backend/socket.io/serverSetup.ts
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
import type { Server as HttpServer } from "node:http";
|
||||||
|
//import { dirname, join } from "node:path";
|
||||||
|
//import { fileURLToPath } from "node:url";
|
||||||
|
import { instrument } from "@socket.io/admin-ui";
|
||||||
|
import { Server } from "socket.io";
|
||||||
|
|
||||||
|
import { createLogger } from "../logger/logger.controller.js";
|
||||||
|
import { allowedOrigins } from "../utils/cors.utils.js";
|
||||||
|
import { registerEmitter } from "./roomEmitter.socket.js";
|
||||||
|
import { createRoomEmitter, preseedRoom } from "./roomService.socket.js";
|
||||||
|
|
||||||
|
//const __filename = fileURLToPath(import.meta.url);
|
||||||
|
//const __dirname = dirname(__filename);
|
||||||
|
const log = createLogger({ module: "socket.io", subModule: "setup" });
|
||||||
|
|
||||||
|
import { auth } from "../utils/auth.utils.js";
|
||||||
|
//import type { Session, User } from "better-auth"; // adjust if needed
|
||||||
|
import { protectedRooms } from "./roomDefinitions.socket.js";
|
||||||
|
|
||||||
|
// declare module "socket.io" {
|
||||||
|
// interface Socket {
|
||||||
|
// user?: User | any;
|
||||||
|
// session?: Session;
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
export const setupSocketIORoutes = (baseUrl: string, server: HttpServer) => {
|
||||||
|
const io = new Server(server, {
|
||||||
|
path: `${baseUrl}/api/socket.io`,
|
||||||
|
cors: {
|
||||||
|
origin: allowedOrigins,
|
||||||
|
credentials: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// ✅ Create emitter instance
|
||||||
|
const { addDataToRoom } = createRoomEmitter(io);
|
||||||
|
registerEmitter(addDataToRoom);
|
||||||
|
|
||||||
|
io.use(async (socket, next) => {
|
||||||
|
try {
|
||||||
|
//const cookieHeader = socket.handshake.headers.cookie;
|
||||||
|
const headers = new Headers();
|
||||||
|
|
||||||
|
for (const [key, value] of Object.entries(socket.request.headers)) {
|
||||||
|
if (typeof value === "string") {
|
||||||
|
headers.set(key, value);
|
||||||
|
} else if (Array.isArray(value)) {
|
||||||
|
headers.set(key, value.join(", "));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const session = await auth.api.getSession({
|
||||||
|
headers,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!session) {
|
||||||
|
return next(); // allow connection, but no auth
|
||||||
|
}
|
||||||
|
|
||||||
|
if (session) {
|
||||||
|
socket.user = session.user;
|
||||||
|
socket.session = session as any;
|
||||||
|
}
|
||||||
|
|
||||||
|
next();
|
||||||
|
} catch (err) {
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
io.on("connection", (s) => {
|
||||||
|
log.info({}, `User connected: ${s.id}`);
|
||||||
|
|
||||||
|
s.emit("welcome", {
|
||||||
|
serverTime: Date.now(),
|
||||||
|
availableRooms: ["logs", "labels"],
|
||||||
|
version: "1.0.0",
|
||||||
|
});
|
||||||
|
|
||||||
|
s.on("join-room", async (rn) => {
|
||||||
|
const config = protectedRooms[rn];
|
||||||
|
|
||||||
|
if (config?.requiresAuth && !s.user) {
|
||||||
|
return s.emit("room-error", {
|
||||||
|
room: rn,
|
||||||
|
message: "Authentication required",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const roles = Array.isArray(config.role) ? config.role : [config.role];
|
||||||
|
|
||||||
|
console.log(roles, s.user.role);
|
||||||
|
|
||||||
|
//if (config?.role && s.user?.role !== config.role) {
|
||||||
|
if (config?.role && !roles.includes(s.user?.role)) {
|
||||||
|
return s.emit("room-error", {
|
||||||
|
room: rn,
|
||||||
|
message: `Not authorized to be in room: ${rn}`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
s.join(rn);
|
||||||
|
|
||||||
|
// get room seeded
|
||||||
|
const history = await preseedRoom(rn);
|
||||||
|
log.info({}, `User joined ${rn}: ${s.id}`);
|
||||||
|
// send the intial data
|
||||||
|
s.emit("room-update", {
|
||||||
|
roomId: rn,
|
||||||
|
payloads: history,
|
||||||
|
initial: true,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
s.on("leave-room", (room) => {
|
||||||
|
s.leave(room);
|
||||||
|
log.info({}, `${s.id} left room: ${room}`);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
io.on("disconnect", (s) => {
|
||||||
|
log.info({}, "User disconnected:", s.id);
|
||||||
|
});
|
||||||
|
|
||||||
|
// admin stuff
|
||||||
|
|
||||||
|
const admin = io.of("/admin");
|
||||||
|
admin.on("connection", (s) => {
|
||||||
|
log.info({}, `User connected: ${s.id}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
admin.on("disconnect", (s) => {
|
||||||
|
log.info({}, "User disconnected:", s.id);
|
||||||
|
});
|
||||||
|
|
||||||
|
instrument(io, {
|
||||||
|
auth: false,
|
||||||
|
//namespaceName: "/admin",
|
||||||
|
});
|
||||||
|
};
|
||||||
1
backend/socket.io/types.socket.ts
Normal file
1
backend/socket.io/types.socket.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export type RoomId = "logs" | "labels"; //| "alerts" | "metrics";
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
import { eq } from "drizzle-orm";
|
|
||||||
import type { Express } from "express";
|
|
||||||
import { db } from "../db/db.controller.js";
|
|
||||||
import { datamart } from "../db/schema/datamart.schema.js";
|
|
||||||
import { apiReturn } from "../utils/returnHelper.utils.js";
|
|
||||||
import addQuery from "./datamartAdd.route.js";
|
|
||||||
import updateQuery from "./datamartUpdate.route.js";
|
|
||||||
import runQuery from "./getDatamart.route.js";
|
|
||||||
|
|
||||||
export const setupDatamartRoutes = (baseUrl: string, app: Express) => {
|
|
||||||
//setup all the routes
|
|
||||||
|
|
||||||
app.use(`${baseUrl}/api/datamart`, runQuery);
|
|
||||||
app.use(`${baseUrl}/api/datamart`, addQuery);
|
|
||||||
app.use(`${baseUrl}/api/datamart`, updateQuery);
|
|
||||||
|
|
||||||
// just sending a get on datamart will return all the queries that we can call.
|
|
||||||
app.get(`${baseUrl}/api/datamart`, async (_, res) => {
|
|
||||||
const queries = await db
|
|
||||||
.select({
|
|
||||||
name: datamart.name,
|
|
||||||
description: datamart.description,
|
|
||||||
options: datamart.options,
|
|
||||||
version: datamart.version,
|
|
||||||
upd_date: datamart.upd_date,
|
|
||||||
})
|
|
||||||
.from(datamart)
|
|
||||||
.where(eq(datamart.active, true));
|
|
||||||
|
|
||||||
return apiReturn(res, {
|
|
||||||
success: true,
|
|
||||||
level: "info",
|
|
||||||
module: "datamart",
|
|
||||||
subModule: "query",
|
|
||||||
message: "All active queries we can run",
|
|
||||||
data: queries,
|
|
||||||
status: 200,
|
|
||||||
});
|
|
||||||
});
|
|
||||||
};
|
|
||||||
@@ -1,125 +0,0 @@
|
|||||||
import fs from "node:fs";
|
|
||||||
import { Router } from "express";
|
|
||||||
import multer from "multer";
|
|
||||||
import z from "zod";
|
|
||||||
import { db } from "../db/db.controller.js";
|
|
||||||
import { datamart, type NewDatamart } from "../db/schema/datamart.schema.js";
|
|
||||||
import { apiReturn } from "../utils/returnHelper.utils.js";
|
|
||||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
|
||||||
|
|
||||||
const r = Router();
|
|
||||||
const upload = multer({ dest: "uploads/" });
|
|
||||||
|
|
||||||
const newQuery = z.object({
|
|
||||||
name: z.string().min(5),
|
|
||||||
description: z.string().min(30),
|
|
||||||
query: z.string().min(10).optional(),
|
|
||||||
options: z
|
|
||||||
.string()
|
|
||||||
.describe("This should be a set of keys separated by a comma")
|
|
||||||
.optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
r.post("/", upload.single("queryFile"), async (req, res) => {
|
|
||||||
try {
|
|
||||||
const v = newQuery.parse(req.body);
|
|
||||||
|
|
||||||
const query: NewDatamart = {
|
|
||||||
...v,
|
|
||||||
name: v.name?.trim().replaceAll(" ", "_"),
|
|
||||||
};
|
|
||||||
|
|
||||||
//console.log(query);
|
|
||||||
if (req.file) {
|
|
||||||
const sqlContents = fs.readFileSync(req.file.path, "utf8");
|
|
||||||
query.query = sqlContents;
|
|
||||||
|
|
||||||
// optional: delete temp file afterwards
|
|
||||||
fs.unlink(req.file.path, () => {});
|
|
||||||
}
|
|
||||||
|
|
||||||
// if we forget the file crash out
|
|
||||||
if (!query.query) {
|
|
||||||
// no query text anywhere
|
|
||||||
return apiReturn(res, {
|
|
||||||
success: true,
|
|
||||||
level: "info", //connect.success ? "info" : "error",
|
|
||||||
module: "routes",
|
|
||||||
subModule: "datamart",
|
|
||||||
message: `${query.name} missing sql file to parse`,
|
|
||||||
data: [],
|
|
||||||
status: 400, //connect.success ? 200 : 400,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// // if we didn't replace the test1 stuff crash out
|
|
||||||
// if (!query.query.includes("test1")) {
|
|
||||||
// return apiReturn(res, {
|
|
||||||
// success: true,
|
|
||||||
// level: "info", //connect.success ? "info" : "error",
|
|
||||||
// module: "routes",
|
|
||||||
// subModule: "datamart",
|
|
||||||
// message:
|
|
||||||
// "Query must include the 'test1' or everything switched to test1",
|
|
||||||
// data: [],
|
|
||||||
// status: 400, //connect.success ? 200 : 400,
|
|
||||||
// });
|
|
||||||
// }
|
|
||||||
|
|
||||||
const { data, error } = await tryCatch(db.insert(datamart).values(query));
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
return apiReturn(res, {
|
|
||||||
success: true,
|
|
||||||
level: "error", //connect.success ? "info" : "error",
|
|
||||||
module: "routes",
|
|
||||||
subModule: "datamart",
|
|
||||||
message: `${query.name} encountered an error while being added`,
|
|
||||||
data: [error.cause],
|
|
||||||
status: 200, //connect.success ? 200 : 400,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (data) {
|
|
||||||
return apiReturn(res, {
|
|
||||||
success: true,
|
|
||||||
level: "info", //connect.success ? "info" : "error",
|
|
||||||
module: "routes",
|
|
||||||
subModule: "datamart",
|
|
||||||
message: `${query.name} was just added`,
|
|
||||||
data: [query],
|
|
||||||
status: 200, //connect.success ? 200 : 400,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
if (err instanceof z.ZodError) {
|
|
||||||
const flattened = z.flattenError(err);
|
|
||||||
// return res.status(400).json({
|
|
||||||
// error: "Validation failed",
|
|
||||||
// details: flattened,
|
|
||||||
// });
|
|
||||||
|
|
||||||
return apiReturn(res, {
|
|
||||||
success: false,
|
|
||||||
level: "error", //connect.success ? "info" : "error",
|
|
||||||
module: "routes",
|
|
||||||
subModule: "auth",
|
|
||||||
message: "Validation failed",
|
|
||||||
data: [flattened],
|
|
||||||
status: 400, //connect.success ? 200 : 400,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
return apiReturn(res, {
|
|
||||||
success: false,
|
|
||||||
level: "error",
|
|
||||||
module: "routes",
|
|
||||||
subModule: "datamart",
|
|
||||||
message: "There was an error creating the new query",
|
|
||||||
data: [err],
|
|
||||||
status: 200,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
export default r;
|
|
||||||
@@ -1,60 +0,0 @@
|
|||||||
/**
|
|
||||||
* If we are running in client mode we want to periodically check the SERVER_NAME for new/updates queries
|
|
||||||
 * this will be on a cron job; we will check 2 times a day for new data, and we will also have a route we can trigger to check this manually in case we have
|
|
||||||
* queries we make for one plant but will eventually go to all plants.
|
|
||||||
* in client mode we will not be able to add, update, or delete, or push updates
|
|
||||||
*
|
|
||||||
* if we are running on server mode we will provide all queries.
|
|
||||||
* when pushing to another server we will allow all or just a single server by plant token.
|
|
||||||
* allow for new queries to be added
|
|
||||||
* allow for queries to be updated by id
|
|
||||||
* table will be
|
|
||||||
* id
|
|
||||||
* name
|
|
||||||
* description
|
|
||||||
* query
|
|
||||||
* version
|
|
||||||
* active
|
|
||||||
* options (string ie start,end)
|
|
||||||
* add_date
|
|
||||||
* add_user
|
|
||||||
* upd_date
|
|
||||||
* upd_user
|
|
||||||
*
|
|
||||||
 * if we are running in localhost or dev — or someone is just running the server on their computer using localhost — we will allow pushing to the main server; the SERVER_NAME in the env should point to the main server
|
|
||||||
* that way when we check if we are in production we will know.
|
|
||||||
* the node env must also be set non production in order to push to the main server.
|
|
||||||
* we will also be able to do all the same as the server mode but the push here will just go to the main server.
|
|
||||||
*/
|
|
||||||
|
|
||||||
// doing the client stuff first
|
|
||||||
|
|
||||||
// ┌──────────────── (optional) second (0 - 59)
|
|
||||||
// │ ┌────────────── minute (0 - 59)
|
|
||||||
// │ │ ┌──────────── hour (0 - 23)
|
|
||||||
// │ │ │ ┌────────── day of month (1 - 31)
|
|
||||||
// │ │ │ │ ┌──────── month (1 - 12, JAN-DEC)
|
|
||||||
// │ │ │ │ │ ┌────── day of week (0 - 6, SUN-Mon)
|
|
||||||
// │ │ │ │ │ │ (0 to 6 are Sunday to Saturday; 7 is Sunday, the same as 0)
|
|
||||||
// │ │ │ │ │ │
|
|
||||||
// * * * * * *
|
|
||||||
if (process.env.NODE_ENV?.trim() === "production") {
|
|
||||||
// setup cronner
|
|
||||||
let cronTime = "* 5 * * * *";
|
|
||||||
if (process.env.QUERY_TIME_TYPE === "m") {
|
|
||||||
// will run this cron ever x
|
|
||||||
cronTime = `* ${process.env.QUERY_CHECK} * * * *`;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (process.env.QUERY_TIME_TYPE === "h") {
|
|
||||||
// will run this cron ever x
|
|
||||||
cronTime = `* * ${process.env.QUERY_CHECK} * * * `;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (process.env.QUERY_TIME_TYPE === "d") {
|
|
||||||
// will run this cron ever x
|
|
||||||
cronTime = `* * * * * ${process.env.QUERY_CHECK}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.info(cronTime);
|
|
||||||
}
|
|
||||||
@@ -1,156 +0,0 @@
|
|||||||
import fs from "node:fs";
|
|
||||||
import { eq, sql } from "drizzle-orm";
|
|
||||||
import { Router } from "express";
|
|
||||||
import multer from "multer";
|
|
||||||
import z from "zod";
|
|
||||||
import { db } from "../db/db.controller.js";
|
|
||||||
import { datamart } from "../db/schema/datamart.schema.js";
|
|
||||||
import { apiReturn } from "../utils/returnHelper.utils.js";
|
|
||||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
|
||||||
|
|
||||||
const r = Router();
|
|
||||||
const upload = multer({ dest: "uploads/" });
|
|
||||||
|
|
||||||
const newQuery = z.object({
|
|
||||||
name: z.string().min(5).optional(),
|
|
||||||
description: z.string().min(30).optional(),
|
|
||||||
query: z.string().min(10).optional(),
|
|
||||||
options: z
|
|
||||||
.string()
|
|
||||||
.describe("This should be a set of keys separated by a comma")
|
|
||||||
.optional(),
|
|
||||||
setActive: z.string().optional(),
|
|
||||||
active: z.boolean().optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
r.patch("/:id", upload.single("queryFile"), async (req, res) => {
|
|
||||||
const { id } = req.params;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const v = newQuery.parse(req.body);
|
|
||||||
|
|
||||||
const query = {
|
|
||||||
...v,
|
|
||||||
};
|
|
||||||
|
|
||||||
//console.log(query);
|
|
||||||
if (req.file) {
|
|
||||||
const sqlContents = fs.readFileSync(req.file.path, "utf8");
|
|
||||||
query.query = sqlContents;
|
|
||||||
|
|
||||||
// optional: delete temp file afterwards
|
|
||||||
fs.unlink(req.file.path, () => {});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (v.name) {
|
|
||||||
query.name = v.name.trim().replaceAll(" ", "_");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (v.description) {
|
|
||||||
query.options = v.description;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (v.options) {
|
|
||||||
query.options = v.options;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (v.setActive) {
|
|
||||||
query.active = v.setActive === "true";
|
|
||||||
}
|
|
||||||
|
|
||||||
// if we forget the file crash out
|
|
||||||
// if (!query.query) {
|
|
||||||
// // no query text anywhere
|
|
||||||
// return apiReturn(res, {
|
|
||||||
// success: true,
|
|
||||||
// level: "info", //connect.success ? "info" : "error",
|
|
||||||
// module: "routes",
|
|
||||||
// subModule: "datamart",
|
|
||||||
// message: `${query.name} missing sql file to parse`,
|
|
||||||
// data: [],
|
|
||||||
// status: 400, //connect.success ? 200 : 400,
|
|
||||||
// });
|
|
||||||
// }
|
|
||||||
|
|
||||||
// // if we didn't replace the test1 stuff crash out
|
|
||||||
|
|
||||||
if (query.query && !query.query.includes("test1")) {
|
|
||||||
return apiReturn(res, {
|
|
||||||
success: true,
|
|
||||||
level: "error", //connect.success ? "info" : "error",
|
|
||||||
module: "routes",
|
|
||||||
subModule: "datamart",
|
|
||||||
message:
|
|
||||||
"All queries must point to test1 this way we can keep it dynamic.",
|
|
||||||
data: [],
|
|
||||||
status: 400, //connect.success ? 200 : 400,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const { data, error } = await tryCatch(
|
|
||||||
db
|
|
||||||
.update(datamart)
|
|
||||||
.set({
|
|
||||||
...query,
|
|
||||||
version: sql`${datamart.version} + 1`,
|
|
||||||
upd_date: sql`NOW()`,
|
|
||||||
upd_user: "lst_user",
|
|
||||||
})
|
|
||||||
.where(eq(datamart.id, id as string)),
|
|
||||||
);
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
return apiReturn(res, {
|
|
||||||
success: true,
|
|
||||||
level: "error", //connect.success ? "info" : "error",
|
|
||||||
module: "routes",
|
|
||||||
subModule: "datamart",
|
|
||||||
message: `${query.name} encountered an error while being updated`,
|
|
||||||
data: [error.cause],
|
|
||||||
status: 200, //connect.success ? 200 : 400,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (data) {
|
|
||||||
return apiReturn(res, {
|
|
||||||
success: true,
|
|
||||||
level: "info", //connect.success ? "info" : "error",
|
|
||||||
module: "routes",
|
|
||||||
subModule: "datamart",
|
|
||||||
message: `${query.name} was just updated`,
|
|
||||||
data: [],
|
|
||||||
status: 200, //connect.success ? 200 : 400,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
if (err instanceof z.ZodError) {
|
|
||||||
const flattened = z.flattenError(err);
|
|
||||||
// return res.status(400).json({
|
|
||||||
// error: "Validation failed",
|
|
||||||
// details: flattened,
|
|
||||||
// });
|
|
||||||
|
|
||||||
return apiReturn(res, {
|
|
||||||
success: false,
|
|
||||||
level: "error", //connect.success ? "info" : "error",
|
|
||||||
module: "routes",
|
|
||||||
subModule: "auth",
|
|
||||||
message: "Validation failed",
|
|
||||||
data: [flattened],
|
|
||||||
status: 400, //connect.success ? 200 : 400,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
return apiReturn(res, {
|
|
||||||
success: false,
|
|
||||||
level: "error",
|
|
||||||
module: "routes",
|
|
||||||
subModule: "datamart",
|
|
||||||
message: "There was an error updating the query",
|
|
||||||
data: [err],
|
|
||||||
status: 200,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
export default r;
|
|
||||||
@@ -1,49 +0,0 @@
|
|||||||
import pino, { type Logger } from "pino";
|
|
||||||
|
|
||||||
export const logLevel = process.env.LOG_LEVEL || "info";
|
|
||||||
const isDev = process.env.NODE_ENV !== "production";
|
|
||||||
const transport = pino.transport({
|
|
||||||
targets: [
|
|
||||||
{
|
|
||||||
target: "pino-pretty",
|
|
||||||
options: {
|
|
||||||
colorize: true,
|
|
||||||
singleLine: "true",
|
|
||||||
destination: process.stdout.fd,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
target: isDev ? "./db.transport.ts" : "./db.transport.js",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
const rootLogger: Logger = pino(
|
|
||||||
{
|
|
||||||
level: logLevel,
|
|
||||||
redact: { paths: ["email", "password"], remove: true },
|
|
||||||
},
|
|
||||||
transport,
|
|
||||||
// pino.multistream([
|
|
||||||
// // Pretty print to console in dev
|
|
||||||
// ...(isDev
|
|
||||||
// ? [
|
|
||||||
// {
|
|
||||||
// stream: pino.transport({
|
|
||||||
// target: "pino-pretty",
|
|
||||||
// options: { colorize: true },
|
|
||||||
// }),
|
|
||||||
// },
|
|
||||||
// ]
|
|
||||||
// : []),
|
|
||||||
// // Always log to database
|
|
||||||
// {
|
|
||||||
// level: "info",
|
|
||||||
// stream: dbStream,
|
|
||||||
// },
|
|
||||||
// ]),
|
|
||||||
);
|
|
||||||
|
|
||||||
export const createLogger = (bindings: Record<string, unknown>): Logger => {
|
|
||||||
return rootLogger.child(bindings);
|
|
||||||
};
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
import type { Express } from "express";
|
|
||||||
import restart from "./prodSqlRestart.route.js";
|
|
||||||
import start from "./prodSqlStart.route.js";
|
|
||||||
import stop from "./prodSqlStop.route.js";
|
|
||||||
export const setupProdSqlRoutes = (baseUrl: string, app: Express) => {
|
|
||||||
//setup all the routes
|
|
||||||
app.use(`${baseUrl}/api/system/prodSql`, start);
|
|
||||||
app.use(`${baseUrl}/api/system/prodSql`, stop);
|
|
||||||
app.use(`${baseUrl}/api/system/prodSql`, restart);
|
|
||||||
};
|
|
||||||
44
backend/system/settings.route.ts
Normal file
44
backend/system/settings.route.ts
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
import { type Response, Router } from "express";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { settings } from "../db/schema/settings.schema.js";
|
||||||
|
|
||||||
|
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
|
||||||
|
// export const updateSetting = async (setting: Setting) => {
|
||||||
|
// // TODO: when the setting is a feature setting we will need to have it run each kill switch on the crons well just stop them and during a reset it just wont start them
|
||||||
|
// // TODO: when the setting is a system we will need to force an app restart
|
||||||
|
// // TODO: when the setting is standard we don't do anything.
|
||||||
|
// };
|
||||||
|
|
||||||
|
const r = Router();
|
||||||
|
|
||||||
|
r.get("/", async (_, res: Response) => {
|
||||||
|
const { data: sName, error: sError } = await tryCatch(
|
||||||
|
db.select().from(settings).orderBy(settings.name),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (sError) {
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error",
|
||||||
|
module: "system",
|
||||||
|
subModule: "settings",
|
||||||
|
message: `There was an error getting the settings `,
|
||||||
|
data: [sError],
|
||||||
|
status: 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: true,
|
||||||
|
level: "info",
|
||||||
|
module: "system",
|
||||||
|
subModule: "settings",
|
||||||
|
message: `All current settings`,
|
||||||
|
data: sName ?? [],
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
export default r;
|
||||||
334
backend/system/settingsBase.controller.ts
Normal file
334
backend/system/settingsBase.controller.ts
Normal file
@@ -0,0 +1,334 @@
|
|||||||
|
import { sql } from "drizzle-orm";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { type NewSetting, settings } from "../db/schema/settings.schema.js";
|
||||||
|
import { createLogger } from "../logger/logger.controller.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
|
||||||
|
/**
 * Seed data for the settings table.
 *
 * Each entry is upserted by `baseSettingValidationCheck`: `value` and
 * `active` are shipped defaults (the upsert never overwrites them for
 * existing rows), while the metadata fields (roles, description, module,
 * type) are refreshed only when `seedVersion` is bumped above the stored
 * seed_version. Bump `seedVersion` after editing an entry's metadata so
 * existing installs pick the change up.
 */
const newSettings: NewSetting[] = [
  // feature settings — module-level kill switches (all shipped inactive)
  {
    name: "opendock_sync",
    value: "0",
    active: false,
    description: "Dock Scheduling system",
    moduleName: "opendock",
    settingType: "feature",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "ocp",
    value: "1",
    active: false,
    description: "One click print",
    moduleName: "ocp",
    settingType: "feature",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "ocme",
    value: "0",
    active: false,
    description: "Dayton Agv system",
    moduleName: "ocme",
    settingType: "feature",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "demandManagement",
    value: "1",
    active: false,
    description: "Fake EDI System",
    moduleName: "demandManagement",
    settingType: "feature",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "qualityRequest",
    value: "0",
    active: false,
    description: "Quality System",
    moduleName: "qualityRequest",
    settingType: "feature",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "tms",
    value: "0",
    active: false,
    description: "Transport system integration",
    moduleName: "tms",
    settingType: "feature",
    roles: ["admin"],
    seedVersion: 1,
  },

  // standard settings — per-module tunables (shipped active)
  {
    name: "prolinkCheck",
    value: "1",
    active: true,
    description:
      "Will prolink be considered to check if matches, mainly used in plants that do not fully utilize prolink + ocp",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    // NOTE: seedVersion 2 — metadata was re-seeded once already.
    name: "bookin",
    value: "1",
    active: true,
    description: "Will we book in the labels after they are printed.",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin", "supervisor"],
    seedVersion: 2,
  },
  {
    name: "printDelay",
    value: "90",
    active: true,
    description: "The default time between label printing",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "dualPrinting",
    value: "0",
    active: true,
    description: "Are we producing on 2 lines that pack into the 1 packer",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "fifoCheck",
    value: "45",
    active: true,
    description:
      "This check is used to do a more near fifo check when pulling pallets for the agv's",
    moduleName: "ocme",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "dayCheck",
    value: "3",
    active: true,
    description: "how many days +/- to check for shipments in alplaprod",
    moduleName: "ocme",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "maxLotsPerTruck",
    value: "3",
    active: true,
    description:
      "What is the maximum amount of lots that can be pulled for a truck this",
    moduleName: "ocme",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "monitorAddress",
    value: "8",
    active: true,
    description:
      "What address(2) are we monitoring for lot restrictions. multiple addresses can be used but should be separated by a comma ,",
    moduleName: "ocme",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "ocmeCycleCount",
    value: "0",
    active: true,
    description:
      "Are you enabling the system count page? meaning that we will allow a 'manual cycle count' vs full auto",
    moduleName: "ocme",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "inhouseDelivery",
    value: "0",
    active: true,
    description:
      "This is for in-house plants that deliver direct to the customer, note if book-in is off this will be ignored ",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "dycoConnect",
    value: "0",
    active: true,
    description: "Connection to the dyco in dayton for the labeling process",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "dycoPrint",
    value: "0",
    active: true,
    description:
      "This tells us we want to use the dyco system to print the labels",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "strapperCheck",
    value: "0",
    active: true,
    description:
      "Strapper alarm check this is more used in tandem with dycoPrint setting",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "rfid_ocp",
    value: "0",
    active: true,
    description:
      "Enable the rfid pallet mgt system this replaces dycoPrint but can be overridden by dycoPrint",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "ocpCycleDelay",
    value: "10",
    active: true,
    description:
      "How long is the delay to fire off the printer check status, Defaulting to 10 seconds",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "pNgAddress",
    value: "139",
    active: true,
    description:
      "This is the P&G address that is used when uploading the new forecast from P&G",
    moduleName: "demandManagement",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "zechetti_1",
    value: "0",
    active: true,
    description:
      "Active the Zechetti 1 process, to print from and no longer utilize the pc",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "zechetti_2",
    value: "0",
    active: true,
    description:
      "Active the Zechetti 2 process, to print from and no longer utilize the pc",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "checkColor",
    value: "0",
    active: true,
    description:
      "During the material check are we going to check the color has enough provided to create the next pallet",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "checkPKG",
    value: "0",
    active: true,
    description:
      "During the material check are we going to check the packaging has enough provided to create the next pallet",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
  {
    name: "lotPrintDelay",
    value: "0",
    active: true,
    description:
      "Override ride the printer printer delay setting, by default if this is on all printers are controlled by this.",
    moduleName: "ocp",
    settingType: "standard",
    roles: ["admin"],
    seedVersion: 1,
  },
];
|
||||||
|
|
||||||
|
/**
 * Seed / refresh the settings table from `newSettings`.
 *
 * Missing settings are inserted with their shipped defaults. For settings
 * that already exist (conflict on `name`), only metadata — roles,
 * description, moduleName, settingType, seedVersion, audit columns — is
 * refreshed, and only when the stored seed_version is missing or behind the
 * seeded one; locally managed `value` and `active` are never overwritten.
 * Errors are logged, not thrown.
 */
export const baseSettingValidationCheck = async () => {
  const log = createLogger({ module: "system", subModule: "settings" });
  const { data, error } = await tryCatch(
    db
      .insert(settings)
      .values(newSettings)
      .onConflictDoUpdate({
        target: settings.name,
        set: {
          // `excluded.*` is the row we attempted to insert (Postgres upsert).
          roles: sql`excluded.roles`,
          description: sql`excluded.description`,
          // camelCase columns need quoting in raw SQL; seed_version is snake_case.
          moduleName: sql`excluded."moduleName"`,
          settingType: sql`excluded."settingType"`,
          seedVersion: sql`excluded.seed_version`,
          upd_user: "LST_System",
          upd_date: sql`now()`,
        },
        // Only refresh rows whose seed version is missing or stale.
        where: sql`
        settings.seed_version IS NULL
        OR settings.seed_version < excluded.seed_version
      `,
      })
      .returning(),
  );

  if (error) {
    log.error(
      { error: error },
      "There was an error when adding or updating the settings.",
    );
  }

  if (data) {
    log.info({}, "All Settings were added/updated");
  }
};
|
||||||
38
backend/system/settingsFeatures.controller.ts
Normal file
38
backend/system/settingsFeatures.controller.ts
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
/**
|
||||||
|
* When a feature setting gets updated we will handle it here.
|
||||||
|
* we will stop jobs, stop cycles
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { dbCleanup } from "../db/dbCleanup.controller.js";
|
||||||
|
import type { Setting } from "../db/schema/settings.schema.js";
|
||||||
|
import { monitorReleaseChanges } from "../opendock/openDockRreleaseMonitor.utils.js";
|
||||||
|
import {
|
||||||
|
killOpendockSocket,
|
||||||
|
opendockSocketMonitor,
|
||||||
|
} from "../opendock/opendockSocketMonitor.utils.js";
|
||||||
|
import {
|
||||||
|
createCronJob,
|
||||||
|
resumeCronJob,
|
||||||
|
stopCronJob,
|
||||||
|
} from "../utils/croner.utils.js";
|
||||||
|
|
||||||
|
export const featureControl = async (data: Setting) => {
|
||||||
|
// when a feature is changed to active or deactivated we will update the cron.
|
||||||
|
if (data.active) {
|
||||||
|
resumeCronJob(data.name);
|
||||||
|
} else {
|
||||||
|
stopCronJob(data.name);
|
||||||
|
}
|
||||||
|
|
||||||
|
// specific setting stuff should have handled like below. what needs turned back on or off.
|
||||||
|
if (data.name === "opendock_sync" && data.active) {
|
||||||
|
opendockSocketMonitor();
|
||||||
|
monitorReleaseChanges();
|
||||||
|
createCronJob("opendockAptCleanup", "0 30 5 * * *", () =>
|
||||||
|
dbCleanup("opendockApt", 90),
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
killOpendockSocket();
|
||||||
|
stopCronJob("opendockAptCleanup");
|
||||||
|
}
|
||||||
|
};
|
||||||
110
backend/system/settingsUpdate.route.ts
Normal file
110
backend/system/settingsUpdate.route.ts
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
import { eq, sql } from "drizzle-orm";
|
||||||
|
import { type Request, type Response, Router } from "express";
|
||||||
|
import { db } from "../db/db.controller.js";
|
||||||
|
import { settings } from "../db/schema/settings.schema.js";
|
||||||
|
|
||||||
|
import { apiReturn } from "../utils/returnHelper.utils.js";
|
||||||
|
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||||
|
import { featureControl } from "./settingsFeatures.controller.js";
|
||||||
|
|
||||||
|
// // TODO: when the setting is a system we will need to force an app restart
|
||||||
|
// // TODO: when the setting is standard we don't do anything.
|
||||||
|
// };
|
||||||
|
|
||||||
|
const r = Router();
|
||||||
|
|
||||||
|
r.patch("/:name", async (req: Request, res: Response) => {
|
||||||
|
const { name } = req.params;
|
||||||
|
const updates: Record<string, unknown | null> = {};
|
||||||
|
// lets see if we even have a setting name
|
||||||
|
|
||||||
|
const { data: sName, error: sError } = await tryCatch(
|
||||||
|
db
|
||||||
|
.select()
|
||||||
|
.from(settings)
|
||||||
|
.where(eq(settings.name, name ?? "")),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (sError) {
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error",
|
||||||
|
module: "system",
|
||||||
|
subModule: "settings",
|
||||||
|
message: `There was an error checking the name of the setting`,
|
||||||
|
data: [sError],
|
||||||
|
status: 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (sName?.length === 0) {
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: false,
|
||||||
|
level: "error",
|
||||||
|
module: "system",
|
||||||
|
subModule: "settings",
|
||||||
|
message: `The setting "${name}" dose not appear to be a valid setting please check the name and try again. `,
|
||||||
|
data: [],
|
||||||
|
status: 400,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// manage the actual setting. we will still do an upsert just in case we strangely get past everything
|
||||||
|
|
||||||
|
if (req.body?.value !== undefined) {
|
||||||
|
updates.value = req.body.value;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (req.body?.description !== undefined) {
|
||||||
|
updates.description = req.body.description;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (req.body?.moduleName !== undefined) {
|
||||||
|
updates.moduleName = req.body.moduleName;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (req.body?.active !== undefined) {
|
||||||
|
updates.active = req.body.active === "true";
|
||||||
|
}
|
||||||
|
|
||||||
|
if (req.body?.roles !== undefined) {
|
||||||
|
updates.roles = req.body.roles;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (req.body?.settingType !== undefined) {
|
||||||
|
updates.settingType = req.body.settingType;
|
||||||
|
}
|
||||||
|
|
||||||
|
updates.upd_user = req.user?.username || "lst_user";
|
||||||
|
updates.upd_date = sql`NOW()`;
|
||||||
|
|
||||||
|
const updatedSetting = await db
|
||||||
|
.update(settings)
|
||||||
|
.set(updates)
|
||||||
|
.where(eq(settings.name, name ?? ""))
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
// the switch statment will only run when the setting actually updates
|
||||||
|
switch (updatedSetting[0]?.settingType) {
|
||||||
|
case "feature":
|
||||||
|
await featureControl(updatedSetting[0]);
|
||||||
|
break;
|
||||||
|
case "system":
|
||||||
|
// TODO: add the system control logic in to restart the app if not in dev mode
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiReturn(res, {
|
||||||
|
success: true,
|
||||||
|
level: "info",
|
||||||
|
module: "system",
|
||||||
|
subModule: "settings",
|
||||||
|
message: `Setting "${name}" Was just updated. `,
|
||||||
|
data: updatedSetting,
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
export default r;
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user