feat(prodsqlconnection): add production SQL connection with restart attempts and failure notification

This commit is contained in:
2025-09-01 16:46:29 -05:00
parent bfb62df445
commit 083f38a079
11 changed files with 315 additions and 44 deletions

View File

@@ -6,22 +6,23 @@ import { printers } from "./internal/ocp/printers/printers.js";
import path, { dirname, join } from "path"; import path, { dirname, join } from "path";
import { fileURLToPath } from "url"; import { fileURLToPath } from "url";
import { db } from "./pkg/db/db.js"; import { db } from "./pkg/db/db.js";
import { settings } from "./pkg/db/schema/settings.js"; import { settings, type Setting } from "./pkg/db/schema/settings.js";
import { env } from "./pkg/utils/envValidator.js"; import { env } from "./pkg/utils/envValidator.js";
import { createLogger } from "./pkg/logger/logger.js"; import { createLogger } from "./pkg/logger/logger.js";
import { returnFunc } from "./pkg/utils/return.js";
import { initializeProdPool } from "./pkg/prodSql/prodSqlConnect.js";
import { tryCatch } from "./pkg/utils/tryCatch.js";
const PORT = Number(env.VITE_PORT) || 4200; const PORT = Number(env.VITE_PORT) || 4200;
const main = async () => { const main = async () => {
//create the logger //create the logger
const log = createLogger({ module: "system", subModule: "main start" }); const log = createLogger({ module: "system", subModule: "main start" });
// base path // base path
let basePath: string = ""; let basePath: string = "";
if ( if (process.env.NODE_ENV?.trim() !== "production") {
process.env.NODE_ENV?.trim() !== "production" &&
!env.RUNNING_IN_DOCKER
) {
basePath = "/lst"; basePath = "/lst";
} }
@@ -29,18 +30,34 @@ const main = async () => {
const __dirname = dirname(__filename); const __dirname = dirname(__filename);
// Db connection stuff // Db connection stuff
try { const res = await tryCatch(db.select().from(settings));
const set = await db.select().from(settings);
if (set.length === 0) { if (res.error) {
return log.fatal( return returnFunc({
{ notify: true }, success: false,
"Seems like the DB is not setup yet the app will close now" module: "system",
); level: "fatal",
} message: `Database lookup failed`,
} catch (error) { notify: false,
console.error("Error getting settings", error); data: [],
});
} }
if (res.data.length === 0) {
//return
returnFunc({
success: false,
module: "system",
level: "fatal",
message: `This seems to be the first time you have started the app please validate the settings have been intiated`,
notify: false,
data: [],
});
}
// connect to the prod sql
await initializeProdPool();
// express app // express app
const app = express(); const app = express();
@@ -82,10 +99,42 @@ const main = async () => {
}, on http://0.0.0.0:${PORT}${basePath}` }, on http://0.0.0.0:${PORT}${basePath}`
) )
); );
// Handle app exit signals
process.on("SIGINT", async () => {
console.log("\nGracefully shutting down...");
//await closePool();
process.exit(0);
});
process.on("SIGTERM", async () => {
console.log("Received termination signal, closing database...");
//await closePool();
process.exit(0);
});
process.on("uncaughtException", async (err) => {
console.log("Uncaught Exception:", err);
//await closePool();
// const emailData = {
// email: "blake.matthes@alpla.com", // should be moved to the db so it can be reused.
// subject: `${os.hostname()} has just encountered a crash.`,
// template: "serverCrash",
// context: {
// error: err,
// plant: `${os.hostname()}`,
// },
// };
// await sendEmail(emailData);
process.exit(1);
});
process.on("beforeExit", async () => {
console.log("Process is about to exit...");
//await closePool();
process.exit(0);
});
}; };
main().catch((err) => { main();
const log = createLogger({ module: "system", subModule: "main start" });
log.fatal("Startup error:", err);
process.exit(1);
});

View File

@@ -1,7 +1,8 @@
import { drizzle } from "drizzle-orm/postgres-js"; import { drizzle } from "drizzle-orm/postgres-js";
import postgres from "postgres"; import postgres from "postgres";
import { env } from "../utils/envValidator.js";
const dbURL = `postgres://${process.env.DATABASE_USER}:${process.env.DATABASE_PASSWORD}@${process.env.DATABASE_HOST}:${process.env.DATABASE_PORT}/${process.env.DATABASE_DB}`; const dbURL = `postgres://${env.DATABASE_USER}:${env.DATABASE_PASSWORD}@${env.DATABASE_HOST}:${env.DATABASE_PORT}/${env.DATABASE_DB}`;
const queryClient = postgres(dbURL, { const queryClient = postgres(dbURL, {
max: 10, max: 10,

View File

@@ -1,6 +1,7 @@
import build from "pino-abstract-transport"; import build from "pino-abstract-transport";
import { db } from "../db/db.js"; import { db } from "../db/db.js";
import { logs, type Log } from "../db/schema/logs.js"; import { logs, type Log } from "../db/schema/logs.js";
import { checkENV } from "../utils/envValidator.js";
const pinoLogLevels: any = { const pinoLogLevels: any = {
10: "trace", 10: "trace",

View File

@@ -3,10 +3,6 @@ import { env } from "../utils/envValidator.js";
export let logLevel = process.env.LOG_LEVEL || "info"; export let logLevel = process.env.LOG_LEVEL || "info";
interface CustomLogger extends pino.Logger {
specialMonitor: pino.LogFn;
}
const transport = pino.transport({ const transport = pino.transport({
targets: [ targets: [
{ {

View File

@@ -19,7 +19,7 @@ async function sendFatal(log: Log) {
{ {
title: `🚨 ${env.PROD_PLANT_TOKEN}: encounter a critical error `, title: `🚨 ${env.PROD_PLANT_TOKEN}: encounter a critical error `,
description: `Where was the error: ${log.module}${ description: `Where was the error: ${log.module}${
log.subModule ? `-${log.subModule}` : null log.subModule ? `-${log.subModule}` : ""
}`, }`,
color: 0xff0000, // red color: 0xff0000, // red
fields: [ fields: [
@@ -66,9 +66,15 @@ export default async function (log: Log) {
const newlog = { const newlog = {
level: levelName, level: levelName,
module: obj?.module.toLowerCase(), module: obj.module
subModule: obj?.subModule.toLowerCase(), ? String(obj.module).toLowerCase()
hostname: obj?.hostname.toLowerCase(), : undefined,
subModule: obj.subModule
? String(obj.subModule).toLowerCase()
: undefined,
hostname: obj.hostname
? String(obj.hostname).toLowerCase()
: undefined,
message: obj.msg, message: obj.msg,
}; };
if (!process.env.WEBHOOK_URL) { if (!process.env.WEBHOOK_URL) {

View File

@@ -0,0 +1,58 @@
import { env } from "../utils/envValidator.js";
import { returnFunc } from "../utils/return.js";
import { connected, pool } from "./prodSqlConnect.js";
/**
 * Run a query against the production SQL server.
 *
 * Pass the query as a string plus a human-readable name (used in the
 * result/log messages). Queries must use `test1` as the plant token,
 * e.g. `select * from AlplaPROD_test1.dbo.table` — every occurrence of
 * "test1" is replaced with env.PROD_PLANT_TOKEN before execution.
 *
 * @param queryToRun SQL text using "test1" as the plant placeholder
 * @param name       label for this query, echoed in result messages
 * @returns a success object carrying `result.recordset`, or a
 *          returnFunc failure object — this function never throws
 */
export async function prodQuery(queryToRun: string, name: string) {
  // Bail out early if initializeProdPool never established a connection.
  if (!connected) {
    return returnFunc({
      success: false,
      module: "prodSql",
      subModule: "query",
      level: "error",
      message: `The sql ${env.PROD_PLANT_TOKEN} is not connected`,
      notify: false,
      data: [],
    });
  }
  // Substitute the real plant token for the "test1" placeholder.
  const query = queryToRun.replaceAll("test1", env.PROD_PLANT_TOKEN);
  try {
    const result = await pool.request().query(query);
    return {
      success: true,
      message: `Query results for: ${name}`,
      data: result.recordset,
    };
  } catch (error: any) {
    if (error.code === "ETIMEOUT") {
      return returnFunc({
        success: false,
        module: "prodSql",
        subModule: "query",
        level: "error",
        message: `${name} did not run due to a timeout.`,
        notify: false,
        data: [error],
      });
    }
    if (error.code === "EREQUEST") {
      return returnFunc({
        success: false,
        module: "prodSql",
        subModule: "query",
        level: "error",
        // Optional chaining: originalError/info are not guaranteed to be
        // present on every EREQUEST, and a throw here would escape the
        // catch. Fall back to the top-level error message.
        message: `${name} encountered an error ${
          error.originalError?.info?.message ?? error.message
        }`,
        notify: false,
        data: [],
      });
    }
    // BUG FIX: any other error code previously fell through and the
    // function resolved to undefined. Return an explicit failure instead.
    return returnFunc({
      success: false,
      module: "prodSql",
      subModule: "query",
      level: "error",
      message: `${name} failed with an unexpected error.`,
      notify: false,
      data: [error],
    });
  }
}

View File

@@ -3,9 +3,11 @@ import { checkHostnamePort } from "../utils/checkHostNamePort.js";
import { sqlConfig } from "./prodSqlConfig.js"; import { sqlConfig } from "./prodSqlConfig.js";
import { env } from "../utils/envValidator.js"; import { env } from "../utils/envValidator.js";
import { createLogger } from "../logger/logger.js"; import { createLogger } from "../logger/logger.js";
import { returnFunc } from "../utils/return.js";
let pool; export let pool: any;
let connected: boolean = false; export let connected: boolean = false;
let reconnecting = false;
export const initializeProdPool = async () => { export const initializeProdPool = async () => {
const log = createLogger({ module: "prodSql" }); const log = createLogger({ module: "prodSql" });
@@ -13,36 +15,120 @@ export const initializeProdPool = async () => {
const serverUp = await checkHostnamePort(`${env.PROD_SERVER}:1433`); const serverUp = await checkHostnamePort(`${env.PROD_SERVER}:1433`);
if (!serverUp) { if (!serverUp) {
log.error(`The sql ${process.env.PROD_SERVER} is not reachable`); reconnectToSql();
return { return returnFunc({
success: false, success: false,
module: "prodSql",
level: "fatal",
message: `The sql ${env.PROD_SERVER} is not reachable`, message: `The sql ${env.PROD_SERVER} is not reachable`,
data: [], data: [],
}; });
} }
// if you were restarting from the endpoint you get this lovely error // if you were restarting from the endpoint you get this lovely error
if (connected) { if (connected) {
log.error("There is already a connection."); return returnFunc({
return { success: false, message: "There is already a connection." }; success: false,
module: "prodSql",
level: "error",
message: `There is already a connection to ${env.PROD_PLANT_TOKEN}`,
data: [],
});
} }
try { try {
pool = sql.connect(sqlConfig); pool = sql.connect(sqlConfig);
log.info( log.info(
`Connected to ${sqlConfig?.server}, and looking at ${sqlConfig?.database}` `Connected to ${sqlConfig?.server}, using DB: ${sqlConfig?.database}`
); );
connected = true; connected = true;
return {
success: true,
message: "The sql server connection has been closed",
};
} catch (error) { } catch (error) {
log.fatal( log.fatal(
`${JSON.stringify( `${JSON.stringify(
error error
)}, "There was an error connecting to the pool."` )}, "There was an error connecting to the pool."`
); );
reconnectToSql();
// throw new Error("There was an error closing the sql connection"); // throw new Error("There was an error closing the sql connection");
} }
}; };
/**
 * Retry the production SQL connection with exponential backoff.
 *
 * Makes up to maxAttempts attempts, doubling the delay (capped at 30s)
 * after each failed reachability probe or connect error. Guarded by the
 * module-level `reconnecting` flag so only one retry loop runs at a
 * time. If every attempt fails, a fatal log with notify:true is emitted
 * and retries stop.
 */
const reconnectToSql = async () => {
  const log = createLogger({ module: "prodSql" });
  if (reconnecting) return; // another retry loop is already running
  reconnecting = true;
  let delay = 2000; // start at 2s
  let attempts = 0;
  const maxAttempts = 10; // or limit by time, e.g. 2 min total
  while (!connected && attempts < maxAttempts) {
    attempts++;
    log.info(
      `Reconnect attempt ${attempts}/${maxAttempts} in ${
        delay / 1000
      }s...`
    );
    await new Promise((res) => setTimeout(res, delay));
    // Cheap reachability probe before paying the full connect cost.
    const serverUp = await checkHostnamePort(`${env.PROD_SERVER}:1433`);
    if (!serverUp) {
      delay = Math.min(delay * 2, 30000); // exponential backoff up to 30s
      continue;
    }
    try {
      // BUG FIX: await the connection. Without the await, `pool` held a
      // pending promise, the catch below could never observe a failed
      // connect, and `connected` was set to true even when the
      // connection ultimately rejected.
      pool = await sql.connect(sqlConfig);
      log.info(
        `Connected to ${sqlConfig?.server}, and looking at ${sqlConfig?.database}`
      );
      reconnecting = false;
      connected = true;
    } catch (error) {
      log.fatal(
        `${JSON.stringify(
          error
        )}, "There was an error connecting to the pool."`
      );
      delay = Math.min(delay * 2, 30000); // exponential backoff up to 30s
      // throw new Error("There was an error closing the sql connection");
    }
  }
  if (!connected) {
    log.fatal(
      { notify: true },
      "Max reconnect attempts reached on the prodSql server. Stopping retries."
    );
    reconnecting = false;
    // optional: exit process or alert someone here
    // process.exit(1);
  }
};
/**
 * Close the production SQL connection pool.
 *
 * Safe to call when no connection exists — returns a failure object
 * instead of throwing. On success, clears the `connected` flag so a
 * later initializeProdPool() can reconnect.
 *
 * @returns `{ success, message }` describing the outcome
 */
export const closePool = async () => {
  const log = createLogger({ module: "prodSql" });
  if (!connected) {
    // BUG FIX: the old messages were copy-pasted from the connect path
    // ("There is no connection a connection." / "There is already a
    // connection.") and were misleading.
    log.error("There is no connection to close.");
    return { success: false, message: "There is no connection to close." };
  }
  try {
    await pool.close();
    log.info("Connection pool closed");
    connected = false;
    return {
      success: true,
      message: "The sql server connection has been closed",
    };
  } catch (error) {
    log.fatal(
      { notify: true },
      `${JSON.stringify(
        error
      )}, "There was an error closing the sql connection"`
    );
    // BUG FIX: the catch previously resolved to undefined; callers now
    // always get a result object.
    return {
      success: false,
      message: "There was an error closing the sql connection",
    };
  }
};

View File

@@ -19,13 +19,15 @@ const envSchema = z.object({
PROD_PLANT_TOKEN: z.string(), PROD_PLANT_TOKEN: z.string(),
PROD_USER: z.string(), PROD_USER: z.string(),
PROD_PASSWORD: z.string(), PROD_PASSWORD: z.string(),
//docker specifics
RUNNING_IN_DOCKER: z.boolean().default(false),
}); });
// use safeParse instead of parse // use safeParse instead of parse
const parsed = envSchema.safeParse(process.env); const parsed = envSchema.safeParse(process.env);
export const checkENV = () => {
return envSchema.safeParse(process.env);
};
const log = createLogger({ module: "envValidation" }); const log = createLogger({ module: "envValidation" });
if (!parsed.success) { if (!parsed.success) {

View File

@@ -0,0 +1,43 @@
import { createLogger } from "../logger/logger.js";
/** Shape of a standardized log-and-return payload. */
interface Data {
  success: boolean;
  module: string;
  subModule?: string;
  level: "info" | "error" | "debug" | "fatal";
  message: string;
  data: unknown[];
  notify?: boolean;
}
/**
 * Logs the message at the requested level and builds the API-style
 * return object in one call, so callers do not have to log and return
 * separately.
 * When to use:
 * * APIs
 * * actual returns where processing needs to stop.
 * @param data payload describing the outcome to log and return
 * @returns `{ success, message, data }` echoing the input
 */
export const returnFunc = (data: Data) => {
  const log = createLogger({ module: data.module });
  switch (data.level) {
    case "info":
      log.info({ notify: data.notify }, data.message);
      break;
    case "error":
      log.error({ notify: data.notify }, data.message);
      break;
    case "debug":
      log.debug({ notify: data.notify }, data.message);
      break;
    case "fatal":
      log.fatal({ notify: data.notify }, data.message);
  }
  return {
    success: data.success,
    message: data.message,
    // BUG FIX: previously hard-coded to [], silently discarding any data
    // the caller passed in (e.g. the error details prodQuery supplies).
    data: data.data,
  };
};

View File

@@ -0,0 +1,28 @@
// The "container" types
type Success<T> = { data: T; error: null };
type Failure<E> = { data: null; error: E };
export type Result<T, E = Error> = Success<T> | Failure<E>;
/**
 * A universal tryCatch wrapper that:
 * - Never throws
 * - Always resolves to Result<T,E>
 * - Allows optional error mapping function for strong typing
 */
export async function tryCatch<T, E = Error>(
  promise: Promise<T>,
  onError?: (error: unknown) => E
): Promise<Result<T, E>> {
  try {
    return { data: await promise, error: null };
  } catch (err: unknown) {
    // Caller-supplied mapper wins; otherwise normalize to an Error.
    if (onError) {
      return { data: null, error: onError(err) };
    }
    const fallback = err instanceof Error ? err : new Error(String(err));
    return { data: null, error: fallback as E };
  }
}

View File

@@ -18,8 +18,9 @@
"install:front": "cd frontend && npm i", "install:front": "cd frontend && npm i",
"install:docs": "cd lstDocs && npm i", "install:docs": "cd lstDocs && npm i",
"install:app": "npm i", "install:app": "npm i",
"start:app": "set NODE_ENV=production && node dist/main.js", "start:app": "node dist/main.js",
"start": "dotenvx run -f .env -- npm run start:app", "start": "dotenvx run -f .env -- npm run start:app",
"start:win": "set NODE_ENV=production && node dist/main.js",
"docker": "docker compose up --build --force-recreate -d", "docker": "docker compose up --build --force-recreate -d",
"commit": "cz", "commit": "cz",
"deploy": "standard-version --conventional-commits && npm run build", "deploy": "standard-version --conventional-commits && npm run build",