db cleanups and logging for od

This commit is contained in:
2026-02-20 09:58:20 -06:00
parent 597d990a69
commit 2d1f613d39
17 changed files with 2452 additions and 53 deletions

View File

@@ -57,6 +57,7 @@
"manadatory",
"OCME",
"onnotice",
"opendock",
"ppoo",
"prodlabels"
],

View File

@@ -5,12 +5,11 @@ import type { Express } from "express";
//const __filename = fileURLToPath(import.meta.url);
// const __dirname = path.dirname(__filename);
import os from "node:os";
import { apiReference } from "@scalar/express-api-reference";
// const port = 3000;
import type { OpenAPIV3_1 } from "openapi-types";
import { datamartAddSpec } from "../scaler/datamartAdd.spec.js";
import { datamartUpdateSpec } from "../scaler/datamartUpdate.spec.js";
import { getDatamartSpec } from "../scaler/getDatamart.spec.js";
import { cronerActiveJobs } from "../scaler/cronerActiveJobs.spec.js";
import { prodLoginSpec } from "../scaler/login.spec.js";
import { prodRestartSpec } from "../scaler/prodSqlRestart.spec.js";
import { prodStartSpec } from "../scaler/prodSqlStart.spec.js";
@@ -28,7 +27,8 @@ export const openApiBase: OpenAPIV3_1.Document = {
},
servers: [
{
url: `http://localhost:3000${process.env.NODE_ENV?.trim() !== "production" ? "/lst" : "/"}`,
// TODO: change this to the https:// if we are in production and port if not.
url: `http://${os.hostname()}:3000${process.env.NODE_ENV?.trim() !== "production" ? "/lst" : "/"}`,
description: "Development server",
},
],
@@ -73,9 +73,8 @@ export const openApiBase: OpenAPIV3_1.Document = {
description: "All system endpoints that will be available to run",
},
{
name: "Datamart",
description:
"All Special queries to run based on there names.\n Refer to the docs to see all possible queries that can be ran here, you can also run the getQueries to see available.",
name: "Utils",
description: "All routes related to the utilities on the server",
},
// { name: "TMS", description: "TMS integration" },
],
@@ -83,14 +82,21 @@ export const openApiBase: OpenAPIV3_1.Document = {
};
export const setupApiDocsRoutes = (baseUrl: string, app: Express) => {
const mergedDatamart = {
"/api/datamart": {
...(getDatamartSpec["/api/datamart"] ?? {}),
...(datamartAddSpec["/api/datamart"] ?? {}),
...(datamartUpdateSpec["/api/datamart"] ?? {}),
},
"/api/datamart/{name}": getDatamartSpec["/api/datamart/{name}"],
};
// const mergedDatamart = {
// "/api/datamart": {
// ...(cronerActiveJobs["/api/datamart"] ?? {}),
// ...(datamartAddSpec["/api/datamart"] ?? {}),
// ...(datamartUpdateSpec["/api/datamart"] ?? {}),
// },
// "/api/datamart/{name}": getDatamartSpec["/api/datamart/{name}"],
// };
// const mergeUtils = {
// "/api/utils/croner": {
// ...(cronerActiveJobs["/api/utils/croner"] ?? {}),
// },
// "/api/utils/{name}": cronerActiveJobs["/api/utils/{name}"],
// };
const fullSpec = {
...openApiBase,
@@ -101,7 +107,8 @@ export const setupApiDocsRoutes = (baseUrl: string, app: Express) => {
...prodRestartSpec,
...prodLoginSpec,
...prodRegisterSpec,
...mergedDatamart,
//...mergedDatamart,
...cronerActiveJobs,
// Add more specs here as you build features
},

View File

@@ -0,0 +1,72 @@
import { createLogger } from "../logger/logger.controller.js";
import { delay } from "../utils/delay.utils.js";
import { db } from "./db.controller.js";

/** Shape of a `select count(*)` row — postgres returns bigint counts as strings. */
type DBCount = {
  count: string;
};

/** Logical cleanup targets mapped to their physical table names. */
const tableMap = {
  logs: "logs",
  jobs: "job_audit_log",
  opendockApt: "opendock_apt",
} as const;
type CleanupTable = keyof typeof tableMap;

/**
 * Delete rows older than `daysToKeep` from one of the known tables, in
 * batches with a short pause between batches so the cleanup does not
 * starve other queries.
 *
 * The table name and interval are interpolated into raw SQL; this is safe
 * only because `name` is restricted to `tableMap`'s keys and `daysToKeep`
 * is a number — do not widen these types.
 *
 * @param name logical table to run the cleanup on
 * @param daysToKeep retention window in days (default 90)
 */
export const dbCleanup = async (name: CleanupTable, daysToKeep = 90) => {
  const log = createLogger({ module: "db", subModule: "cleanup" });
  // TODO: send a backup of the deleted rows to another server (via POST or
  // something); we may have to reduce the limit, but we'll tackle that later.
  const limit = 1000; // rows deleted per batch
  const delayTime = 250; // ms pause between batches
  const dbCount = (await db.execute(
    `select count(*) from public.${tableMap[name]} WHERE created_at < NOW() - INTERVAL '${daysToKeep} days'`,
  )) as DBCount[];
  // If the count row is somehow missing, assume one full batch so we still
  // attempt a cleanup pass rather than silently skipping.
  const staleCount = parseInt(dbCount[0]?.count ?? `${limit}`, 10);
  const loopCount = Math.ceil(staleCount / limit);
  if (staleCount > 0) {
    // NOTE: the estimate only accounts for the inter-batch delays, not the
    // DELETE statements themselves.
    log.info(
      `Table clean up for: ${name}, rows older than ${daysToKeep} day(s) will be removed. There are ${loopCount} loop(s) to be completed, Approx time: ${((loopCount * delayTime) / 1000 / 60).toFixed(2)} min(s).`,
    );
  } else {
    log.info(`Table clean up for: ${name}, Currently has nothing to clean up.`);
    return;
  }
  let rowsDeleted: number;
  do {
    // Delete the oldest `limit` stale rows; RETURNING id lets us count how
    // many actually went away.
    const deleted = await db.execute(`
      DELETE FROM public.${tableMap[name]}
      WHERE id IN (
        SELECT id
        FROM public.${tableMap[name]}
        WHERE created_at < NOW() - INTERVAL '${daysToKeep} days'
        ORDER BY created_at
        LIMIT ${limit}
      )
      RETURNING id;
    `);
    rowsDeleted = deleted.length;
    if (rowsDeleted > 0) {
      await delay(delayTime);
    }
    // A short batch means we drained the backlog; stop looping.
  } while (rowsDeleted === limit);
  log.info(`Table clean up for: ${name}, Has completed.`);
};

View File

@@ -1,4 +1,5 @@
import {
index,
integer,
jsonb,
pgTable,
@@ -9,17 +10,29 @@ import {
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import type { z } from "zod";
export const jobAuditLog = pgTable("job_audit_log", {
id: uuid("id").defaultRandom().primaryKey(),
jobName: text("job_name"),
startedAt: timestamp("start_at"),
finishedAt: timestamp("finished_at"),
durationMs: integer("duration_ms"),
status: text("status"), //success | error
errorMessage: text("error_message"),
errorStack: text("error_stack"),
metadata: jsonb("meta_data"),
});
// Audit record for every cron job run: timings, outcome, and any error
// details, plus a created_at column for age-based cleanup.
export const jobAuditLog = pgTable(
  "job_audit_log",
  {
    id: uuid("id").defaultRandom().primaryKey(),
    jobName: text("job_name"),
    // Column is named "start_at" (not "started_at") — keep in sync with raw SQL.
    startedAt: timestamp("start_at"),
    finishedAt: timestamp("finished_at"),
    durationMs: integer("duration_ms"),
    status: text("status"), //success | error
    errorMessage: text("error_message"),
    errorStack: text("error_stack"),
    metadata: jsonb("meta_data"),
    // Added so age-based cleanup can filter rows by insertion time.
    createdAt: timestamp("created_at").defaultNow(),
  },
  (table) => {
    return {
      // NOTE(review): this index covers (start_at, id), but the cleanup in
      // dbCleanup filters and orders on created_at — confirm this index
      // actually serves the cleanup query, or index created_at instead.
      cleanupIdx: index("idx_job_audit_logs_cleanup").on(
        table.startedAt,
        table.id,
      ),
    };
  },
);
export const jobAuditLogSchema = createSelectSchema(jobAuditLog);
export const newJobAuditLogSchema = createInsertSchema(jobAuditLog);

View File

@@ -9,7 +9,7 @@ import {
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import type { z } from "zod";
export const opendockApt = pgTable("opendock.apt", {
export const opendockApt = pgTable("opendock_apt", {
id: uuid("id").defaultRandom().primaryKey(),
release: integer("release").unique(),
openDockAptId: text("open_dock_apt_id").notNull(),

View File

@@ -4,6 +4,7 @@ import { formatInTimeZone } from "date-fns-tz";
import { sql } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { opendockApt } from "../db/schema/opendock.schema.js";
import { createLogger } from "../logger/logger.controller.js";
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
import {
type SqlQuery,
@@ -23,8 +24,6 @@ let lastCheck = formatInTimeZone(
//const queue: unknown[] = [];
//const isProcessing: boolean = false;
let odToken: string | null = null;
type Releases = {
ReleaseNumber: number;
DeliveryState: number;
@@ -36,10 +35,30 @@ type Releases = {
LineItemArticleWeight: number;
CustomerReleaseNumber: string;
};
type ODToken = {
odToken: string | null;
tokenDate: Date | null;
};
let odToken: ODToken = {
odToken: null,
tokenDate: new Date(),
};
const TWENTY_FOUR_HOURS = 24 * 60 * 60 * 1000;
const log = createLogger({ module: "opendock", subModule: "releaseMonitor" });
const postRelease = async (release: Releases) => {
if (!odToken) {
console.info("Getting token");
// TODO: add a time stamp on the token so it gets refreshed every 24hours
if (!odToken.odToken) {
log.info("Getting Auth Token");
await getToken();
}
if (
new Date(odToken.tokenDate || Date.now()).getTime() <
Date.now() - TWENTY_FOUR_HOURS
) {
log.info("Refreshing Auth Token");
await getToken();
}
/**
@@ -145,7 +164,7 @@ const postRelease = async (release: Releases) => {
);
if (aptError) {
console.error("Error getting apt data: ", aptError);
log.error({ error: aptError }, "Error getting apt data");
// TODO: send an error email on this one as it will cause issues
return;
}
@@ -163,13 +182,13 @@ const postRelease = async (release: Releases) => {
{
headers: {
"content-type": "application/json; charset=utf-8",
Authorization: `Bearer ${odToken}`,
Authorization: `Bearer ${odToken.odToken}`,
},
},
);
if (response.status === 400) {
console.error(response.data.data.message);
log.error({}, response.data.data.message);
return;
}
@@ -188,13 +207,13 @@ const postRelease = async (release: Releases) => {
})
.returning();
console.info(`${release.ReleaseNumber} was updated`);
log.info(`${release.ReleaseNumber} was updated`);
} catch (e) {
console.error(e);
log.error(e);
}
} catch (e: any) {
//console.info(newDockApt);
console.error(e.response.data);
log.error(e.response.data);
return;
}
@@ -215,7 +234,7 @@ const postRelease = async (release: Releases) => {
// this will be utilized when we are listening for the changes to the apts. that way we can update the state to arrived. we will run our own checks on this guy during the incoming messages.
if (response.status === 400) {
console.error(response.data.data.message);
log.error(response.data.data.message);
return;
}
@@ -236,12 +255,12 @@ const postRelease = async (release: Releases) => {
})
.returning();
console.info(`${release.ReleaseNumber} was created`);
log.info(`${release.ReleaseNumber} was created`);
} catch (e) {
console.error(e);
log.error(e);
}
} catch (e: any) {
console.error(e.response.data);
log.error(e.response.data);
return;
}
@@ -293,7 +312,8 @@ export const monitorReleaseChanges = async () => {
}
}
} catch (e) {
console.error("Monitor error:", e);
console.error(e);
log.error({ error: e }, "Monitor error");
}
});
}
@@ -340,14 +360,14 @@ const getToken = async () => {
);
if (status === 400) {
console.error(data.message);
log.error(data.message);
return;
}
odToken = data.access_token;
console.info("Token added");
odToken = { odToken: data.access_token, tokenDate: new Date() };
log.info("Token added");
} catch (e) {
console.error(e);
log.error(e);
}
};

View File

@@ -0,0 +1,43 @@
import type { OpenAPIV3_1 } from "openapi-types";
/**
 * OpenAPI path spec for GET /api/utils/croner — lists all cron jobs on the
 * server. Merged into the base spec in setupApiDocsRoutes.
 *
 * Note: OpenAPI's `format` keyword is for well-known formats ("int64",
 * "date-time", …); sample values belong in `example`, which is what the
 * fields below now use.
 */
export const cronerActiveJobs: OpenAPIV3_1.PathsObject = {
  "/api/utils/croner": {
    get: {
      summary: "Cron jobs",
      description: "Returns all jobs on the server.",
      tags: ["Utils"],
      responses: {
        "200": {
          description: "Jobs returned",
          content: {
            "application/json": {
              schema: {
                type: "object",
                properties: {
                  status: {
                    type: "boolean",
                    example: true,
                  },
                  uptime: {
                    type: "number",
                    format: "float",
                    example: 3454.34,
                  },
                  memoryUsage: {
                    type: "string",
                    example: "Heap: 11.62 MB / RSS: 86.31 MB",
                  },
                  sqlServerStats: {
                    type: "number",
                    example: 442127,
                  },
                },
              },
            },
          },
        },
      },
    },
  },
};

View File

@@ -1,10 +1,12 @@
import { createServer } from "node:http";
import os from "node:os";
import createApp from "./app.js";
import { dbCleanup } from "./db/dbCleanup.controller.js";
import { createLogger } from "./logger/logger.controller.js";
import { monitorReleaseChanges } from "./opendock/releaseMonitor.utils.js";
import { connectProdSql } from "./prodSql/prodSqlConnection.controller.js";
import { setupSocketIORoutes } from "./socket.io/serverSetup.js";
import { createCronJob } from "./utils/croner.utils.js";
const port = Number(process.env.PORT) || 3000;
@@ -17,6 +19,13 @@ const start = async () => {
// start long live processes
setTimeout(() => {
monitorReleaseChanges(); // this is od monitoring the db for all new releases
createCronJob("JobAuditLogCleanUp", "* 0 5 * * * *", () =>
dbCleanup("jobs", 30),
);
createCronJob("logsCleanup", "* 15 5 * * * *", () => dbCleanup("logs", 30));
createCronJob("opendockAptCleanup", "* 30 5 * * * *", () =>
dbCleanup("opendockApt", 90),
);
}, 5 * 1000);
const { app, baseUrl } = await createApp();

View File

@@ -1,7 +1,7 @@
import { jobAuditLog } from "backend/db/schema/auditLog.schema.js";
import { Cron } from "croner";
import { eq } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { jobAuditLog } from "../db/schema/auditLog.schema.js";
import { createLogger } from "../logger/logger.controller.js";
// example createJob
@@ -19,6 +19,17 @@ export interface JobInfo {
// Store running cronjobs
export const runningCrons: Record<string, Cron> = {};
// how to set the times
// * ┌──────────────── (optional) second (0 - 59) \n
// * │ ┌────────────── minute (0 - 59)
// * │ │ ┌──────────── hour (0 - 23)
// * │ │ │ ┌────────── day of month (1 - 31)
// * │ │ │ │ ┌──────── month (1 - 12, JAN-DEC)
// * │ │ │ │ │ ┌────── day of week (0 - 6, SUN-SAT)
// * │ │ │ │ │ │ (0 to 6 are Sunday to Saturday; 7 is Sunday, the same as 0)
// * │ │ │ │ │ │ ┌──── (optional) year (1 - 9999)
// * │ │ │ │ │ │ │
// * * 05 * * * * *
/**
*
* @param name Name of the job we want to run

View File

@@ -1,6 +1,6 @@
import { Router } from "express";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { getAllJobs, resumeCronJob, stopCronJob } from "./croner.utils.js";
import { apiReturn } from "./returnHelper.utils.js";
const r = Router();

View File

@@ -1,7 +1,6 @@
import type { Express } from "express";
import getActiveJobs from "./cronnerActiveJobs.route.js";
import jobStatusChange from "./cronnerStatusChange.js";
import getActiveJobs from "./cronerActiveJobs.route.js";
import jobStatusChange from "./cronerStatusChange.js";
export const setupUtilsRoutes = (baseUrl: string, app: Express) => {
app.use(`${baseUrl}/api/utils/croner`, getActiveJobs);
app.use(`${baseUrl}/api/utils/croner`, jobStatusChange);

View File

@@ -0,0 +1 @@
CREATE INDEX "idx_job_audit_logs_cleanup" ON "job_audit_log" USING btree ("start_at","id");

View File

@@ -0,0 +1,4 @@
ALTER TABLE "opendock.apt" RENAME TO "opendock_apt";--> statement-breakpoint
ALTER TABLE "opendock_apt" DROP CONSTRAINT "opendock.apt_release_unique";--> statement-breakpoint
ALTER TABLE "job_audit_log" ADD COLUMN "created_at" timestamp DEFAULT now();--> statement-breakpoint
ALTER TABLE "opendock_apt" ADD CONSTRAINT "opendock_apt_release_unique" UNIQUE("release");

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -85,6 +85,20 @@
"when": 1771515240318,
"tag": "0011_eminent_iron_patriot",
"breakpoints": true
},
{
"idx": 12,
"version": "7",
"when": 1771537852152,
"tag": "0012_fantastic_randall_flagg",
"breakpoints": true
},
{
"idx": 13,
"version": "7",
"when": 1771598443244,
"tag": "0013_flaky_quicksilver",
"breakpoints": true
}
]
}