Refactored data mart and added a better job monitor

This commit is contained in:
2026-02-19 13:20:20 -06:00
parent 76503f558b
commit 597d990a69
29 changed files with 2857 additions and 621 deletions

View File

@@ -13,49 +13,72 @@
*
* when criteria are passed over we will handle them by counting how many were passed (up to 3), then deal with each one respectively
*/
import { eq } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { datamart } from "../db/schema/datamart.schema.js";
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
import {
type SqlQuery,
sqlQuerySelector,
} from "../prodSql/prodSqlQuerySelector.utils.js";
import { returnFunc } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
import { datamartData } from "./datamartData.utlis.js";
// A single key/value pair used to substitute [placeholder] tokens in a query.
type Options = {
name: string;
value: string;
};
// Payload accepted by runDatamartQuery.
// NOTE(review): the two `options` lines below look like interleaved
// pre-change (string) and post-change (Options) versions from a diff
// render — keep exactly one after confirming against VCS history.
type Data = {
name: string;
options: string;
options: Options;
// when true, callers must supply options for the query to run
optionsRequired?: boolean;
// exact number of options the query expects when optionsRequired is set
howManyOptionsRequired?: number;
};
export const runDatamartQuery = async (data: Data) => {
// search the query db for the query by name
const { data: queryInfo, error: qIe } = await tryCatch(
db.select().from(datamart).where(eq(datamart.name, data.name)),
);
const sqlQuery = sqlQuerySelector(`${data.name}`) as SqlQuery;
if (qIe) {
const getDataMartInfo = datamartData.filter((x) => x.endpoint === data.name);
// const optionsMissing =
// !data.options || Object.keys(data.options).length === 0;
const optionCount =
Object.keys(data.options).length ===
getDataMartInfo[0]?.howManyOptionsRequired;
if (getDataMartInfo[0]?.optionsRequired && !optionCount) {
return returnFunc({
success: false,
level: "error",
module: "datamart",
subModule: "query",
message: `This query is required to have the ${getDataMartInfo[0]?.howManyOptionsRequired} options set in order use it.`,
data: [getDataMartInfo[0].options],
notify: false,
});
}
if (!sqlQuery.success) {
return returnFunc({
success: false,
level: "error",
module: "datamart",
subModule: "query",
message: `Error getting ${data.name} info`,
data: [qIe],
data: [sqlQuery.message],
notify: false,
});
}
// create the query with no changed just to have it here
let datamartQuery = queryInfo[0]?.query || "";
let datamartQuery = sqlQuery?.query || "";
// split the criteria by "," then and then update the query
if (data.options !== "") {
const params = new URLSearchParams(data.options);
for (const [rawKey, rawValue] of params.entries()) {
const key = rawKey.trim();
const value = rawValue.trim();
datamartQuery = datamartQuery.replaceAll(`[${key}]`, value);
}
if (data.options) {
Object.entries(data.options ?? {}).forEach(([key, value]) => {
const pattern = new RegExp(`\\[${key.trim()}\\]`, "g");
datamartQuery = datamartQuery.replace(pattern, String(value).trim());
});
}
const { data: queryRun, error } = await tryCatch(

View File

@@ -1,69 +1,60 @@
import { and, eq, gte, sql } from "drizzle-orm";
import type { Express } from "express";
import { db } from "../db/db.controller.js";
import { datamart } from "../db/schema/datamart.schema.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import addQuery from "./datamartAdd.route.js";
import updateQuery from "./datamartUpdate.route.js";
import { datamartData } from "./datamartData.utlis.js";
import runQuery from "./getDatamart.route.js";
export const setupDatamartRoutes = (baseUrl: string, app: Express) => {
// NOTE(review): this span interleaves pre-change and post-change lines from
// a diff render — the /sync handler exists both live and commented out, and
// the first apiReturn below carries duplicate `message`/`data` keys, so this
// text is not valid as-is. Reconstruct the intended version from VCS history
// before editing; comments below describe what each piece appears to do.
// the sync callback.
app.get(`${baseUrl}/api/datamart/sync`, async (req, res) => {
const { time } = req.query;
const now = new Date();
// app.get(`${baseUrl}/api/datamart/sync`, async (req, res) => {
// const { time } = req.query;
// const now = new Date();
// `time` is minutes; default window is the last 15 minutes
const minutes = parseInt(time as string, 10) || 15;
const cutoff = new Date(now.getTime() - minutes * 60 * 1000);
// const minutes = parseInt(time as string, 10) || 15;
// const cutoff = new Date(now.getTime() - minutes * 60 * 1000);
// rows updated since the cutoff; without `time`, all rows
const results = await db
.select()
.from(datamart)
.where(time ? gte(datamart.upd_date, cutoff) : sql`true`);
// const results = await db
// .select()
// .from(datamart)
// .where(time ? gte(datamart.upd_date, cutoff) : sql`true`);
// return apiReturn(res, {
// success: true,
// level: "info",
// module: "datamart",
// subModule: "query",
// message: `All Queries older than ${parseInt(process.env.QUERY_CHECK?.trim() || "15", 10)}min `,
// data: results,
// status: 200,
// });
// });
//setup all the routes
app.use(`${baseUrl}/api/datamart`, runQuery);
// just sending a get on datamart will return all the queries that we can call.
app.get(`${baseUrl}/api/datamart`, async (_, res) => {
// const queries = await db
// .select({
// id: datamart.id,
// name: datamart.name,
// description: datamart.description,
// options: datamart.options,
// version: datamart.version,
// upd_date: datamart.upd_date,
// })
// .from(datamart)
// .where(and(eq(datamart.active, true), eq(datamart.public, true)));
return apiReturn(res, {
success: true,
level: "info",
module: "datamart",
subModule: "query",
message: `All Queries older than ${parseInt(process.env.QUERY_CHECK?.trim() || "15", 10)}min `,
data: results,
message: "All active queries we can run",
data: datamartData,
status: 200,
});
});
//setup all the routes
app.use(`${baseUrl}/api/datamart`, runQuery);
app.use(`${baseUrl}/api/datamart`, addQuery);
app.use(`${baseUrl}/api/datamart`, updateQuery);
// just sending a get on datamart will return all the queries that we can call.
app.get(`${baseUrl}/api/datamart`, async (_, res) => {
const queries = await db
.select({
id: datamart.id,
name: datamart.name,
description: datamart.description,
options: datamart.options,
version: datamart.version,
upd_date: datamart.upd_date,
})
.from(datamart)
.where(and(eq(datamart.active, true), eq(datamart.public, true)));
return apiReturn(
res,
{
success: true,
level: "info",
module: "datamart",
subModule: "query",
message: "All active queries we can run",
data: queries,
status: 200,
},
// presumably controls Excel export sheet selection — TODO confirm
// against apiReturn's second-parameter contract
{ sheetName: 3 },
);
});
};

View File

@@ -1,125 +0,0 @@
import fs from "node:fs";
import { Router } from "express";
import multer from "multer";
import z from "zod";
import { db } from "../db/db.controller.js";
import { datamart, type NewDatamart } from "../db/schema/datamart.schema.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";

const r = Router();
// uploaded .sql files land in uploads/ temporarily; we read and then unlink them
const upload = multer({ dest: "uploads/" });

// request-body contract for creating a new datamart query
const newQuery = z.object({
  name: z.string().min(5),
  description: z.string().min(30),
  query: z.string().min(10).optional(),
  options: z
    .string()
    .describe("This should be a set of keys separated by a comma")
    .optional(),
});

/**
 * POST / — create a new datamart query.
 * The SQL text may come from the `query` body field or from an uploaded
 * `queryFile`; when both are present the file's contents win.
 */
r.post("/", upload.single("queryFile"), async (req, res) => {
  try {
    const v = newQuery.parse(req.body);
    const query: NewDatamart = {
      ...v,
      // normalize the name so it is URL/endpoint friendly
      name: v.name?.trim().replaceAll(" ", "_"),
    };
    if (req.file) {
      // prefer the uploaded .sql file's contents as the query text
      const sqlContents = fs.readFileSync(req.file.path, "utf8");
      query.query = sqlContents;
      // best-effort cleanup of the temp upload
      fs.unlink(req.file.path, () => {});
    }
    // no query text anywhere (neither body field nor file) — reject
    if (!query.query) {
      return apiReturn(res, {
        success: false, // was `true`; a 400 rejection must not report success
        level: "error",
        module: "routes",
        subModule: "datamart",
        message: `${query.name} missing sql file to parse`,
        data: [],
        status: 400,
      });
    }
    const { data, error } = await tryCatch(db.insert(datamart).values(query));
    if (error) {
      return apiReturn(res, {
        success: false, // was `true`; this is an error response
        level: "error",
        module: "routes",
        subModule: "datamart",
        message: `${query.name} encountered an error while being added`,
        data: [error.cause],
        // NOTE(review): status 200 on a failed insert matches the rest of
        // this file, so it is preserved — confirm clients key off `success`
        // rather than the HTTP status.
        status: 200,
      });
    }
    if (data) {
      return apiReturn(res, {
        success: true,
        level: "info",
        module: "routes",
        subModule: "datamart",
        message: `${query.name} was just added`,
        data: [query],
        status: 200,
      });
    }
  } catch (err) {
    if (err instanceof z.ZodError) {
      const flattened = z.flattenError(err);
      return apiReturn(res, {
        success: false,
        level: "error",
        module: "routes",
        // was "auth" — copy-paste from the auth routes; this router is datamart
        subModule: "datamart",
        message: "Validation failed",
        data: [flattened],
        status: 400,
      });
    }
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "routes",
      subModule: "datamart",
      message: "There was an error creating the new query",
      data: [err],
      status: 200,
    });
  }
});

export default r;

View File

@@ -0,0 +1,24 @@
/**
 * Central registry of every datamart query.
 * Keeping them all here makes them easy to maintain and update as we
 * progress with the changes and updates to v3.
 *
 * For options, the docs will show examples of how to pull this data.
 */
export const datamartData = [
  {
    name: "Active articles",
    endpoint: "activeArticles",
    description: "returns all active articles for the server with custom data",
    // options is a comma-separated string so each item can be split out
    // later in the Excel file
    options: "",
    optionsRequired: false,
  },
  {
    name: "Delivery by date range",
    endpoint: "deliveryByDateRange",
    // `new Date()` already means "now" — the former `new Date(Date.now())`
    // was redundant
    description: `Returns all Deliverys in selected date range IE: 1/1/${new Date().getFullYear()} to 1/31/${new Date().getFullYear()}`,
    // options is a comma-separated string so each item can be split out
    // later in the Excel file
    options: "startDate,endDate",
    optionsRequired: true,
    howManyOptionsRequired: 2,
  },
];

View File

@@ -1,129 +0,0 @@
/**
 * Datamart sync job.
 *
 * Client mode: periodically poll SERVER_NAME for new/updated queries on a
 * croner job (checked on a schedule, with a manual-trigger route for queries
 * built for one plant that will eventually go to all plants). In client mode
 * we cannot add, update, delete, or push updates.
 *
 * Server mode: provide all queries; pushing to another server can target all
 * servers or a single one by plant token; new queries can be added and
 * existing ones updated by id.
 * Table columns:
 *   id, name, description, query, version, active,
 *   options (string, e.g. "start,end"),
 *   add_date, add_user, upd_date, upd_user
 *
 * Localhost/dev (someone running the server on their own computer): pushes go
 * to the main server — SERVER_NAME in the env should point to the main server
 * so the production check knows where we are. NODE_ENV must also be
 * non-production in order to push to the main server. Everything else works
 * like server mode, except pushes only go to the main server.
 */
import axios from "axios";
import { count, sql } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { datamart } from "../db/schema/datamart.schema.js";
import { createLogger } from "../logger/logger.controller.js";
import { createCronJob } from "../utils/croner.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";

// cron field reference:
// ┌──────────────── (optional) second (0 - 59)
// │ ┌────────────── minute (0 - 59)
// │ │ ┌──────────── hour (0 - 23)
// │ │ │ ┌────────── day of month (1 - 31)
// │ │ │ │ ┌──────── month (1 - 12, JAN-DEC)
// │ │ │ │ │ ┌────── day of week (0 - 6, SUN-SAT; 7 is Sunday, same as 0)
// │ │ │ │ │ │
// * * * * * *

/**
 * Start the client-side datamart sync cron.
 * Schedule comes from QUERY_CHECK + QUERY_TIME_TYPE ("m" minutes /
 * "h" hours; default every 5 minutes). The job only runs when we are a
 * production client — servers are the source of truth.
 */
export const startDatamartSync = async () => {
  let cronTime = "*/5 * * * *";
  if (process.env.QUERY_TIME_TYPE === "m") {
    // run every QUERY_CHECK minutes
    cronTime = `*/${process.env.QUERY_CHECK} * * * *`;
  }
  if (process.env.QUERY_TIME_TYPE === "h") {
    // run once every QUERY_CHECK hours — the minute field must be pinned
    // to 0; the previous "* */x * * *" fired every MINUTE of each
    // matching hour
    cronTime = `0 */${process.env.QUERY_CHECK} * * *`;
  }
  if (
    process.env.NODE_ENV?.trim() === "production" &&
    process.env.APP_RUNNING_IN?.trim() === "client"
  ) {
    createCronJob("dataMartSync", cronTime, async () => {
      const log = createLogger({ module: "system", subModule: "croner" });
      // look-back window in minutes, mirroring the cron interval
      const syncTimeToCheck: number = parseInt(
        process.env.QUERY_CHECK?.trim() || "5",
        10,
      );
      let url = `http://${process.env.SERVER_NAME?.trim()}:3000/lst/api/datamart/sync?time=${syncTimeToCheck}`;
      // if we hold no queries locally yet, drop the time filter and pull
      // everything — the previous "< 0" check could never be true because
      // a row count is never negative
      const qCount = await db.select({ count: count() }).from(datamart);
      if ((qCount[0]?.count ?? 0) === 0) {
        url = `http://${process.env.SERVER_NAME?.trim()}:3000/lst/api/datamart/sync`;
      }
      const { data, error } = await tryCatch(axios.get(url));
      if (error !== null) {
        log.error(
          { error: error.message },
          `There was an error getting the new queries.`,
        );
        return;
      }
      log.info({ data: data.data }, `Queries to be updated`);
      const queries = data.data.data;
      if (queries.length === 0) return;
      // upsert: insert new queries, update existing rows on id conflict.
      // NOTE(review): sql.raw(`excluded.${datamart.name}`) interpolates a
      // drizzle Column OBJECT, not the column-name string — verify this
      // renders as `excluded.name` and not `excluded.[object Object]`.
      const { data: updateQ, error: UpdateQError } = await tryCatch(
        db
          .insert(datamart)
          .values(queries)
          .onConflictDoUpdate({
            target: datamart.id,
            set: {
              name: sql.raw(`excluded.${datamart.name}`),
              description: sql.raw(`excluded.${datamart.description}`),
              query: sql.raw(`excluded.${datamart.query}`),
              version: sql.raw(`excluded.${datamart.version}`),
              active: sql.raw(`excluded.${datamart.active}`),
              options: sql.raw(`excluded.${datamart.options}`),
              public: sql.raw(`excluded.${datamart.public}`),
            },
          }),
      );
      if (UpdateQError !== null) {
        log.error(
          { error: UpdateQError },
          "There was an error add/updating the queries",
        );
      }
      if (updateQ) {
        log.info({}, "New and updated queries have been added");
      }
    });
  }
};

View File

@@ -1,163 +0,0 @@
import fs from "node:fs";
import { eq, sql } from "drizzle-orm";
import { Router } from "express";
import multer from "multer";
import z from "zod";
import { db } from "../db/db.controller.js";
import { datamart } from "../db/schema/datamart.schema.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";

const r = Router();
// uploaded .sql files land in uploads/ temporarily; we read and then unlink them
const upload = multer({ dest: "uploads/" });

// request-body contract for updating an existing datamart query;
// every field is optional so callers can PATCH only what changed
const newQuery = z.object({
  name: z.string().min(5).optional(),
  description: z.string().min(30).optional(),
  query: z.string().min(10).optional(),
  options: z
    .string()
    .describe("This should be a set of keys separated by a comma")
    .optional(),
  setActive: z.string().optional(),
  active: z.boolean().optional(),
  setPublicActive: z.string().optional(),
  public: z.boolean().optional(),
});

/**
 * PATCH /:id — partially update a datamart query.
 * SQL text may be replaced via an uploaded `queryFile`; the string flags
 * `setActive`/`setPublicActive` are coerced to booleans. Every successful
 * update bumps `version` and stamps `upd_date`/`upd_user`.
 */
r.patch("/:id", upload.single("queryFile"), async (req, res) => {
  const { id } = req.params;
  try {
    const v = newQuery.parse(req.body);
    const query = {
      ...v,
    };
    if (req.file) {
      // an uploaded .sql file replaces any query text from the body
      const sqlContents = fs.readFileSync(req.file.path, "utf8");
      query.query = sqlContents;
      // best-effort cleanup of the temp upload
      fs.unlink(req.file.path, () => {});
    }
    if (v.name) {
      // normalize the name so it is URL/endpoint friendly
      query.name = v.name.trim().replaceAll(" ", "_");
    }
    if (v.description) {
      // BUG FIX: was `query.options = v.description`, which silently
      // overwrote the options column with the description text
      query.description = v.description;
    }
    if (v.options) {
      query.options = v.options;
    }
    if (v.setActive) {
      query.active = v.setActive === "true";
    }
    if (v.setPublicActive) {
      query.public = v.setPublicActive === "true";
    }
    // every query must target test1 so the datasource stays swappable
    if (query.query && !query.query.includes("test1")) {
      return apiReturn(res, {
        success: false, // was `true`; a 400 rejection must not report success
        level: "error",
        module: "routes",
        subModule: "datamart",
        message:
          "All queries must point to test1 this way we can keep it dynamic.",
        data: [],
        status: 400,
      });
    }
    const { data, error } = await tryCatch(
      db
        .update(datamart)
        .set({
          ...query,
          version: sql`${datamart.version} + 1`,
          upd_date: sql`NOW()`,
          upd_user: "lst_user",
        })
        .where(eq(datamart.id, id as string))
        .returning({ name: datamart.name }),
    );
    if (error) {
      return apiReturn(res, {
        success: false, // was `true`; this is an error response
        level: "error",
        module: "routes",
        subModule: "datamart",
        message: `${query.name} encountered an error while being updated`,
        data: [error.cause],
        // NOTE(review): status 200 on a failed update matches the rest of
        // this file, so it is preserved — confirm clients key off `success`
        // rather than the HTTP status.
        status: 200,
      });
    }
    if (data) {
      return apiReturn(res, {
        success: true,
        level: "info",
        module: "routes",
        subModule: "datamart",
        message: `${data[0]?.name} was just updated`,
        data: [],
        status: 200,
      });
    }
  } catch (err) {
    if (err instanceof z.ZodError) {
      const flattened = z.flattenError(err);
      return apiReturn(res, {
        success: false,
        level: "error",
        module: "routes",
        // was "auth" — copy-paste from the auth routes; this router is datamart
        subModule: "datamart",
        message: "Validation failed",
        data: [flattened],
        status: 400,
      });
    }
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "routes",
      subModule: "datamart",
      message: "There was an error updating the query",
      data: [err],
      status: 200,
    });
  }
});

export default r;

View File

@@ -4,11 +4,14 @@ import { runDatamartQuery } from "./datamart.controller.js";
const r = Router();
// Key/value pairs taken from the request query string and forwarded to
// runDatamartQuery, presumably for [placeholder] substitution in the SQL
// text — confirm against datamart.controller.
type Options = {
name: string;
value: string;
};
r.get("/:name", async (req, res) => {
const { name } = req.params;
const options = new URLSearchParams(
req.query as Record<string, string>,
).toString();
const options = req.query as Options;
const dataRan = await runDatamartQuery({ name, options });
return apiReturn(res, {