Refactored data mart and added better job monitor

This commit is contained in:
2026-02-19 13:20:20 -06:00
parent 76503f558b
commit 597d990a69
29 changed files with 2857 additions and 621 deletions

View File

@@ -54,6 +54,7 @@
"alplaprod",
"Datamart",
"intiallally",
"manadatory",
"OCME",
"onnotice",
"ppoo",

View File

@@ -13,49 +13,72 @@
*
* when criteria are passed over we will handle them by counting how many were passed, up to 3, then deal with each one respectively
*/
import { eq } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { datamart } from "../db/schema/datamart.schema.js";
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
import {
type SqlQuery,
sqlQuerySelector,
} from "../prodSql/prodSqlQuerySelector.utils.js";
import { returnFunc } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
import { datamartData } from "./datamartData.utlis.js";
type Options = {
name: string;
value: string;
};
type Data = {
name: string;
options: string;
options: Options;
optionsRequired?: boolean;
howManyOptionsRequired?: number;
};
export const runDatamartQuery = async (data: Data) => {
// search the query db for the query by name
const { data: queryInfo, error: qIe } = await tryCatch(
db.select().from(datamart).where(eq(datamart.name, data.name)),
);
const sqlQuery = sqlQuerySelector(`${data.name}`) as SqlQuery;
if (qIe) {
const getDataMartInfo = datamartData.filter((x) => x.endpoint === data.name);
// const optionsMissing =
// !data.options || Object.keys(data.options).length === 0;
const optionCount =
Object.keys(data.options).length ===
getDataMartInfo[0]?.howManyOptionsRequired;
if (getDataMartInfo[0]?.optionsRequired && !optionCount) {
return returnFunc({
success: false,
level: "error",
module: "datamart",
subModule: "query",
message: `This query is required to have the ${getDataMartInfo[0]?.howManyOptionsRequired} options set in order use it.`,
data: [getDataMartInfo[0].options],
notify: false,
});
}
if (!sqlQuery.success) {
return returnFunc({
success: false,
level: "error",
module: "datamart",
subModule: "query",
message: `Error getting ${data.name} info`,
data: [qIe],
data: [sqlQuery.message],
notify: false,
});
}
// create the query with no changed just to have it here
let datamartQuery = queryInfo[0]?.query || "";
let datamartQuery = sqlQuery?.query || "";
// split the criteria by "," then and then update the query
if (data.options !== "") {
const params = new URLSearchParams(data.options);
for (const [rawKey, rawValue] of params.entries()) {
const key = rawKey.trim();
const value = rawValue.trim();
datamartQuery = datamartQuery.replaceAll(`[${key}]`, value);
}
if (data.options) {
Object.entries(data.options ?? {}).forEach(([key, value]) => {
const pattern = new RegExp(`\\[${key.trim()}\\]`, "g");
datamartQuery = datamartQuery.replace(pattern, String(value).trim());
});
}
const { data: queryRun, error } = await tryCatch(

View File

@@ -1,69 +1,60 @@
import { and, eq, gte, sql } from "drizzle-orm";
import type { Express } from "express";
import { db } from "../db/db.controller.js";
import { datamart } from "../db/schema/datamart.schema.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import addQuery from "./datamartAdd.route.js";
import updateQuery from "./datamartUpdate.route.js";
import { datamartData } from "./datamartData.utlis.js";
import runQuery from "./getDatamart.route.js";
export const setupDatamartRoutes = (baseUrl: string, app: Express) => {
// the sync callback.
app.get(`${baseUrl}/api/datamart/sync`, async (req, res) => {
const { time } = req.query;
const now = new Date();
// app.get(`${baseUrl}/api/datamart/sync`, async (req, res) => {
// const { time } = req.query;
// const now = new Date();
const minutes = parseInt(time as string, 10) || 15;
const cutoff = new Date(now.getTime() - minutes * 60 * 1000);
// const minutes = parseInt(time as string, 10) || 15;
// const cutoff = new Date(now.getTime() - minutes * 60 * 1000);
const results = await db
.select()
.from(datamart)
.where(time ? gte(datamart.upd_date, cutoff) : sql`true`);
// const results = await db
// .select()
// .from(datamart)
// .where(time ? gte(datamart.upd_date, cutoff) : sql`true`);
// return apiReturn(res, {
// success: true,
// level: "info",
// module: "datamart",
// subModule: "query",
// message: `All Queries older than ${parseInt(process.env.QUERY_CHECK?.trim() || "15", 10)}min `,
// data: results,
// status: 200,
// });
// });
//setup all the routes
app.use(`${baseUrl}/api/datamart`, runQuery);
// just sending a get on datamart will return all the queries that we can call.
app.get(`${baseUrl}/api/datamart`, async (_, res) => {
// const queries = await db
// .select({
// id: datamart.id,
// name: datamart.name,
// description: datamart.description,
// options: datamart.options,
// version: datamart.version,
// upd_date: datamart.upd_date,
// })
// .from(datamart)
// .where(and(eq(datamart.active, true), eq(datamart.public, true)));
return apiReturn(res, {
success: true,
level: "info",
module: "datamart",
subModule: "query",
message: `All Queries older than ${parseInt(process.env.QUERY_CHECK?.trim() || "15", 10)}min `,
data: results,
message: "All active queries we can run",
data: datamartData,
status: 200,
});
});
//setup all the routes
app.use(`${baseUrl}/api/datamart`, runQuery);
app.use(`${baseUrl}/api/datamart`, addQuery);
app.use(`${baseUrl}/api/datamart`, updateQuery);
// just sending a get on datamart will return all the queries that we can call.
app.get(`${baseUrl}/api/datamart`, async (_, res) => {
const queries = await db
.select({
id: datamart.id,
name: datamart.name,
description: datamart.description,
options: datamart.options,
version: datamart.version,
upd_date: datamart.upd_date,
})
.from(datamart)
.where(and(eq(datamart.active, true), eq(datamart.public, true)));
return apiReturn(
res,
{
success: true,
level: "info",
module: "datamart",
subModule: "query",
message: "All active queries we can run",
data: queries,
status: 200,
},
{ sheetName: 3 },
);
});
};

View File

@@ -1,125 +0,0 @@
import fs from "node:fs";
import { Router } from "express";
import multer from "multer";
import z from "zod";
import { db } from "../db/db.controller.js";
import { datamart, type NewDatamart } from "../db/schema/datamart.schema.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
const r = Router();
// Temp storage for the uploaded .sql file; its contents are read into the
// record and the temp file is deleted afterwards.
const upload = multer({ dest: "uploads/" });

// Validation schema for a new datamart query.
const newQuery = z.object({
  name: z.string().min(5),
  description: z.string().min(30),
  query: z.string().min(10).optional(),
  options: z
    .string()
    .describe("This should be a set of keys separated by a comma")
    .optional(),
});

/**
 * POST / — create a new datamart query.
 * The SQL text may come from the "query" body field or from an uploaded
 * "queryFile"; an uploaded file wins. Responds with an apiReturn envelope.
 */
r.post("/", upload.single("queryFile"), async (req, res) => {
  try {
    const v = newQuery.parse(req.body);
    const query: NewDatamart = {
      ...v,
      // Normalize the name so it is endpoint/URL friendly.
      name: v.name?.trim().replaceAll(" ", "_"),
    };
    if (req.file) {
      const sqlContents = fs.readFileSync(req.file.path, "utf8");
      query.query = sqlContents;
      // best-effort cleanup of the temp upload
      fs.unlink(req.file.path, () => {});
    }
    // No SQL text from either the body or an uploaded file — reject.
    if (!query.query) {
      return apiReturn(res, {
        success: false, // was `true`, contradicting the 400 status
        level: "error",
        module: "routes",
        subModule: "datamart",
        message: `${query.name} missing sql file to parse`,
        data: [],
        status: 400,
      });
    }
    const { data, error } = await tryCatch(db.insert(datamart).values(query));
    if (error) {
      return apiReturn(res, {
        success: false, // was `true` despite reporting an error
        level: "error",
        module: "routes",
        subModule: "datamart",
        message: `${query.name} encountered an error while being added`,
        data: [error.cause],
        status: 500, // was 200 for a failed insert
      });
    }
    if (data) {
      return apiReturn(res, {
        success: true,
        level: "info",
        module: "routes",
        subModule: "datamart",
        message: `${query.name} was just added`,
        data: [query],
        status: 200,
      });
    }
    // Defensive fallback: tryCatch returned neither data nor error; the
    // request would otherwise hang with no response at all.
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "routes",
      subModule: "datamart",
      message: `${query.name} produced no result while being added`,
      data: [],
      status: 500,
    });
  } catch (err) {
    if (err instanceof z.ZodError) {
      const flattened = z.flattenError(err);
      return apiReturn(res, {
        success: false,
        level: "error",
        module: "routes",
        subModule: "datamart", // was "auth" — copy/paste slip from another route
        message: "Validation failed",
        data: [flattened],
        status: 400,
      });
    }
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "routes",
      subModule: "datamart",
      message: "There was an error creating the new query",
      data: [err],
      status: 500, // was 200 for an unexpected failure
    });
  }
});
export default r;

View File

@@ -0,0 +1,24 @@
/**
 * Will store and maintain all queries for datamart here.
 * This way they can all be easily maintained and updated as we progress
 * with the changes and updates to v3.
 *
 * For options, the docs will show examples on how to pull this.
 */

// Hoisted so the year is computed once; `new Date(Date.now())` was also
// redundant — `new Date()` already uses the current time.
const currentYear = new Date().getFullYear();

export const datamartData = [
  {
    name: "Active articles",
    endpoint: "activeArticles",
    description: "returns all active articles for the server with custom data",
    // Comma-separated option keys; split later for the excel file.
    options: "",
    optionsRequired: false,
  },
  {
    name: "Delivery by date range",
    endpoint: "deliveryByDateRange",
    // "Deliverys" -> "Deliveries" (typo fix in the user-facing description).
    description: `Returns all Deliveries in selected date range IE: 1/1/${currentYear} to 1/31/${currentYear}`,
    // Comma-separated option keys; split later for the excel file.
    options: "startDate,endDate",
    optionsRequired: true,
    howManyOptionsRequired: 2,
  },
];

View File

@@ -1,129 +0,0 @@
/**
* If we are running in client mode we want to periodically check the SERVER_NAME for new/updates queries
* this will be on a croner job, we will check 2 times a day for new data, we will also have a route we can trigger to check this manually in case we have
* queries we make for one plant but will eventually go to all plants.
* in client mode we will not be able to add, update, or delete, or push updates
*
* if we are running on server mode we will provide all queries.
* when pushing to another server we will allow all or just a single server by plant token.
* allow for new queries to be added
* allow for queries to be updated by id
* table will be
* id
* name
* description
* query
* version
* active
* options (string ie start,end)
* add_date
* add_user
* upd_date
* upd_user
*
* if we are running in localhost or dev, or just someone running the server on their computer but using localhost, we will allow pushing to the main server; the SERVER_NAME in the env should point to the main server
* that way when we check if we are in production we will know.
* the node env must also be set non production in order to push to the main server.
* we will also be able to do all the same as the server mode but the push here will just go to the main server.
*/
import axios from "axios";
import { count, sql } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { datamart } from "../db/schema/datamart.schema.js";
import { createLogger } from "../logger/logger.controller.js";
import { createCronJob } from "../utils/croner.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
// doing the client stuff first
// ┌──────────────── (optional) second (0 - 59)
// │ ┌────────────── minute (0 - 59)
// │ │ ┌──────────── hour (0 - 23)
// │ │ │ ┌────────── day of month (1 - 31)
// │ │ │ │ ┌──────── month (1 - 12, JAN-DEC)
// │ │ │ │ │ ┌────── day of week (0 - 6, SUN-SAT)
// │ │ │ │ │ │ (0 to 6 are Sunday to Saturday; 7 is Sunday, the same as 0)
// │ │ │ │ │ │
// * * * * * *
/**
 * Starts the client-side datamart sync cron job.
 * Builds the cron schedule from QUERY_CHECK / QUERY_TIME_TYPE env vars and,
 * when running as a client in production, periodically pulls new/updated
 * queries from SERVER_NAME and upserts them into the local datamart table.
 */
export const startDatamartSync = async () => {
  // Build the cron schedule: QUERY_CHECK is the interval amount and
  // QUERY_TIME_TYPE selects the unit — "m" (minutes) or "h" (hours).
  let cronTime = "*/5 * * * *"; // default: every 5 minutes
  if (process.env.QUERY_TIME_TYPE === "m") {
    // run every QUERY_CHECK minutes
    cronTime = `*/${process.env.QUERY_CHECK} * * * *`;
  }
  if (process.env.QUERY_TIME_TYPE === "h") {
    // Run once every QUERY_CHECK hours. The previous pattern (`* */x * * *`)
    // fired every minute of each matching hour; pinning the minute field to
    // 0 makes it fire once per interval.
    cronTime = `0 */${process.env.QUERY_CHECK} * * *`;
  }
  // Only clients in production poll the server for new/updated queries.
  if (
    process.env.NODE_ENV?.trim() === "production" &&
    process.env.APP_RUNNING_IN?.trim() === "client"
  ) {
    createCronJob("dataMartSync", cronTime, async () => {
      const log = createLogger({ module: "system", subModule: "croner" });
      // How far back (in minutes) the server should look for changed queries.
      const syncTimeToCheck: number = parseInt(
        process.env.QUERY_CHECK?.trim() || "5",
        10,
      );
      let url = `http://${process.env.SERVER_NAME?.trim()}:3000/lst/api/datamart/sync?time=${syncTimeToCheck}`;
      // validate how many queries we already have locally
      const qCount = await db.select({ count: count() }).from(datamart);
      // If we have no queries at all, drop the time filter and pull
      // everything. (Was `< 0`, which a row count can never satisfy, so the
      // full initial sync never ran.)
      if ((qCount[0]?.count || 0) === 0) {
        url = `http://${process.env.SERVER_NAME?.trim()}:3000/lst/api/datamart/sync`;
      }
      const { data, error } = await tryCatch(axios.get(url));
      if (error !== null) {
        log.error(
          { error: error.message },
          `There was an error getting the new queries.`,
        );
        return;
      }
      // Upsert whatever the server handed back.
      log.info({ data: data.data }, `There are queries to be updated`);
      const queries = data.data.data;
      if (queries.length === 0) return;
      const { data: updateQ, error: UpdateQError } = await tryCatch(
        db
          .insert(datamart)
          .values(queries)
          .onConflictDoUpdate({
            target: datamart.id,
            // NOTE(review): interpolating a drizzle Column object into a
            // template string may not render the bare column name — confirm
            // these `excluded.…` references produce the intended SQL.
            set: {
              name: sql.raw(`excluded.${datamart.name}`),
              description: sql.raw(`excluded.${datamart.description}`),
              query: sql.raw(`excluded.${datamart.query}`),
              version: sql.raw(`excluded.${datamart.version}`),
              active: sql.raw(`excluded.${datamart.active}`),
              options: sql.raw(`excluded.${datamart.options}`),
              public: sql.raw(`excluded.${datamart.public}`),
            },
          }),
      );
      if (UpdateQError !== null) {
        log.error(
          { error: UpdateQError },
          "There was an error add/updating the queries",
        );
      }
      if (updateQ) {
        log.info({}, "New and updated queries have been added");
      }
    });
  }
};

View File

@@ -1,163 +0,0 @@
import fs from "node:fs";
import { eq, sql } from "drizzle-orm";
import { Router } from "express";
import multer from "multer";
import z from "zod";
import { db } from "../db/db.controller.js";
import { datamart } from "../db/schema/datamart.schema.js";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
const r = Router();
// Temp storage for an optionally uploaded .sql file.
const upload = multer({ dest: "uploads/" });

// All fields optional: this is a PATCH, callers send only what changes.
const newQuery = z.object({
  name: z.string().min(5).optional(),
  description: z.string().min(30).optional(),
  query: z.string().min(10).optional(),
  options: z
    .string()
    .describe("This should be a set of keys separated by a comma")
    .optional(),
  setActive: z.string().optional(),
  active: z.boolean().optional(),
  setPublicActive: z.string().optional(),
  public: z.boolean().optional(),
});

/**
 * PATCH /:id — update an existing datamart query.
 * SQL text may come from the "query" field or an uploaded "queryFile".
 * Bumps the version and upd_date on every successful update.
 */
r.patch("/:id", upload.single("queryFile"), async (req, res) => {
  const { id } = req.params;
  try {
    const v = newQuery.parse(req.body);
    const query = {
      ...v,
    };
    if (req.file) {
      const sqlContents = fs.readFileSync(req.file.path, "utf8");
      query.query = sqlContents;
      // best-effort cleanup of the temp upload
      fs.unlink(req.file.path, () => {});
    }
    if (v.name) {
      // normalize to an endpoint-friendly name
      query.name = v.name.trim().replaceAll(" ", "_");
    }
    // Bug fix: the description used to be written into `query.options`,
    // clobbering any options sent in the same request. The spread above
    // already carries `description`, so no extra assignment is needed.
    if (v.options) {
      query.options = v.options;
    }
    // string flags from multipart forms -> booleans for the db
    if (v.setActive) {
      query.active = v.setActive === "true";
    }
    if (v.setPublicActive) {
      query.public = v.setPublicActive === "true";
    }
    // All queries must target test1 so the db name stays dynamic.
    if (query.query && !query.query.includes("test1")) {
      return apiReturn(res, {
        success: false, // was `true`, contradicting the 400 status
        level: "error",
        module: "routes",
        subModule: "datamart",
        message:
          "All queries must point to test1 this way we can keep it dynamic.",
        data: [],
        status: 400,
      });
    }
    // Strip the form-only helper flags so only real columns reach .set().
    const { setActive, setPublicActive, ...updates } = query;
    const { data, error } = await tryCatch(
      db
        .update(datamart)
        .set({
          ...updates,
          version: sql`${datamart.version} + 1`,
          upd_date: sql`NOW()`,
          upd_user: "lst_user",
        })
        .where(eq(datamart.id, id as string))
        .returning({ name: datamart.name }),
    );
    if (error) {
      return apiReturn(res, {
        success: false, // was `true` despite reporting an error
        level: "error",
        module: "routes",
        subModule: "datamart",
        message: `${query.name} encountered an error while being updated`,
        data: [error.cause],
        status: 500, // was 200 for a failed update
      });
    }
    if (data) {
      return apiReturn(res, {
        success: true,
        level: "info",
        module: "routes",
        subModule: "datamart",
        message: `${data[0]?.name} was just updated`,
        data: [],
        status: 200,
      });
    }
    // Defensive fallback: tryCatch returned neither data nor error — don't
    // leave the request hanging with no response.
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "routes",
      subModule: "datamart",
      message: `${query.name} produced no result while being updated`,
      data: [],
      status: 500,
    });
  } catch (err) {
    if (err instanceof z.ZodError) {
      const flattened = z.flattenError(err);
      return apiReturn(res, {
        success: false,
        level: "error",
        module: "routes",
        subModule: "datamart", // was "auth" — copy/paste slip
        message: "Validation failed",
        data: [flattened],
        status: 400,
      });
    }
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "routes",
      subModule: "datamart",
      message: "There was an error updating the query",
      data: [err],
      status: 500, // was 200 for an unexpected failure
    });
  }
});
export default r;

View File

@@ -4,11 +4,14 @@ import { runDatamartQuery } from "./datamart.controller.js";
const r = Router();
type Options = {
name: string;
value: string;
};
r.get("/:name", async (req, res) => {
const { name } = req.params;
const options = new URLSearchParams(
req.query as Record<string, string>,
).toString();
const options = req.query as Options;
const dataRan = await runDatamartQuery({ name, options });
return apiReturn(res, {

View File

@@ -0,0 +1,28 @@
import {
integer,
jsonb,
pgTable,
text,
timestamp,
uuid,
} from "drizzle-orm/pg-core";
import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import type { z } from "zod";
// Audit trail for background/cron job executions: one row per run,
// capturing timing, outcome, and error details for the job monitor.
export const jobAuditLog = pgTable("job_audit_log", {
  id: uuid("id").defaultRandom().primaryKey(),
  jobName: text("job_name"),
  // NOTE(review): column is "start_at" while the field is startedAt and the
  // sibling column is "finished_at" — confirm the asymmetry is intentional.
  startedAt: timestamp("start_at"),
  finishedAt: timestamp("finished_at"),
  durationMs: integer("duration_ms"), // run duration (ms, per column name)
  status: text("status"), //success | error
  errorMessage: text("error_message"),
  errorStack: text("error_stack"),
  metadata: jsonb("meta_data"), // free-form per-job context
});
// Zod schemas derived from the table for validating reads/inserts.
export const jobAuditLogSchema = createSelectSchema(jobAuditLog);
export const newJobAuditLogSchema = createInsertSchema(jobAuditLog);
export type JobAuditLog = z.infer<typeof jobAuditLogSchema>;
export type NewJobAuditLog = z.infer<typeof newJobAuditLogSchema>;

View File

@@ -1,6 +1,7 @@
import {
boolean,
jsonb,
pgEnum,
pgTable,
text,
timestamp,
@@ -11,6 +12,12 @@ import { createInsertSchema, createSelectSchema } from "drizzle-zod";
import { z } from "zod";
export const settingType = pgEnum("setting_type", [
"feature",
"system",
"standard",
]);
export const settings = pgTable(
"settings",
{
@@ -21,6 +28,7 @@ export const settings = pgTable(
moduleName: text("moduleName"), // what part of lst dose it belong to this is used to split the settings out later
active: boolean("active").default(true),
roles: jsonb("roles").notNull().default(["systemAdmin"]), // role or roles to see this goes along with the moduleName, need to have a x role in module to see this setting.
settingType: settingType(),
add_User: text("add_User").default("LST_System").notNull(),
add_Date: timestamp("add_Date").defaultNow(),
upd_user: text("upd_User").default("LST_System").notNull(),

View File

@@ -2,84 +2,17 @@ import axios from "axios";
import { addHours } from "date-fns";
import { formatInTimeZone } from "date-fns-tz";
import { sql } from "drizzle-orm";
import { db } from "../../db/db.controller.js";
import { opendockApt } from "../../db/schema/opendock.schema.js";
import { prodQuery } from "../../prodSql/prodSqlQuery.controller.js";
import { tryCatch } from "../../utils/trycatch.utils.js";
const releaseQuery = `
SELECT
[Id]
,[ReleaseNumber]
,[CustomerReleaseNumber]
,[ReleaseState]
,[LineItemId]
,[BlanketOrderId]
,[DeliveryState]
,[ReleaseConfirmationState]
,[PlanningState]
,[OrderDate]
,cast([DeliveryDate] as datetime2) as DeliveryDate
,[LoadingDate]
,[Quantity]
,[DeliveredQuantity]
,[DeliveredQuantityTradeUnits]
,[DeliveredQuantityLoadingUnits]
,[PackagingId]
,[PackagingHumanReadableId]
,[PackagingDescription]
,[MainMaterialId]
,[MainMaterialHumanReadableId]
,[MainMaterialDescription]
,[AdditionalInformation1]
,[AdditionalInformation2]
,[D365SupplierLot]
,[TradeUnits]
,[LoadingUnits]
,[Trucks]
,[LoadingToleranceType]
,[UnderdeliveryDeviation]
,[OverdeliveryDeviation]
,[ArticleAccountRequirements_ArticleExact]
,[ArticleAccountRequirements_CustomerExact]
,[ArticleAccountRequirements_PackagingExact]
,[ArticleAccountRequirements_MainMaterialExact]
,[PriceLogicType]
,[AllowProductionLotMixing]
,[EnforceStrictPicking]
,[SalesPrice]
,[Currency]
,[QuantityUnit]
,[SalesPriceRemark]
,[DeliveryConditionId]
,[DeliveryConditionHumanReadableId]
,[DeliveryConditionDescription]
,[PaymentTermsId]
,[PaymentTermsHumanReadableId]
,[PaymentTermsDescription]
,[Remark]
,[DeliveryAddressId]
,[DeliveryAddressHumanReadableId]
,[DeliveryAddressDescription]
,[DeliveryStreetName]
,[DeliveryAddressZip]
,[DeliveryCity]
,[DeliveryCountry]
,[ReleaseDiscount]
,[CustomerArtNo]
,[LineItemHumanReadableId]
,[LineItemArticle]
,[LineItemArticleWeight]
,[LineItemQuantityType]
,[TotalPrice]
,[Add_User]
,[Add_Date]
,[Upd_User]
,cast([Upd_Date] as dateTime) as Upd_Date
,[VatRate]
,[ArticleAlias]
FROM [test1_AlplaPROD2.0_Reporting].[reporting_order].[Release] (nolock)
where format([Upd_Date], 'yyyy-MM-dd HH:mm:ss') > [dateCheck]`;
import { db } from "../db/db.controller.js";
import { opendockApt } from "../db/schema/opendock.schema.js";
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
import {
type SqlQuery,
sqlQuerySelector,
} from "../prodSql/prodSqlQuerySelector.utils.js";
import { createCronJob } from "../utils/croner.utils.js";
import { delay } from "../utils/delay.utils.js";
import { returnFunc } from "../utils/returnHelper.utils.js";
import { tryCatch } from "../utils/trycatch.utils.js";
let lastCheck = formatInTimeZone(
new Date().toISOString(),
@@ -87,10 +20,6 @@ let lastCheck = formatInTimeZone(
"yyyy-MM-dd HH:mm:ss",
);
const delay = (ms: number) => {
return new Promise((resolve) => setTimeout(resolve, ms));
};
//const queue: unknown[] = [];
//const isProcessing: boolean = false;
@@ -322,37 +251,82 @@ const postRelease = async (release: Releases) => {
};
export const monitorReleaseChanges = async () => {
console.info("Starting release monitor", lastCheck);
// TODO: validate if the setting for opendocks is active and start / stop the system based on this
// if it changes we set to false and the next loop will stop.
const openDockMonitor = true;
// console.info("Starting release monitor", lastCheck);
const sqlQuery = sqlQuerySelector(`releaseChecks`) as SqlQuery;
if (!sqlQuery.success) {
return returnFunc({
success: false,
level: "error",
module: "datamart",
subModule: "query",
message: `Error getting releaseChecks info`,
data: [sqlQuery.message],
notify: false,
});
}
if (openDockMonitor) {
createCronJob("open-dock-monitor", "*/15 * * * * *", async () => {
try {
const result = await prodQuery(
sqlQuery.query.replace("[dateCheck]", `'${lastCheck}'`),
"Get release info",
);
if (result.data.length) {
for (const release of result.data) {
await postRelease(release);
lastCheck = formatInTimeZone(
new Date(release.Upd_Date).toISOString(),
"UTC",
"yyyy-MM-dd HH:mm:ss",
);
await delay(500);
}
}
} catch (e) {
console.error("Monitor error:", e);
}
});
}
// run the main game loop
while (true) {
try {
const result = await prodQuery(
releaseQuery.replace("[dateCheck]", `'${lastCheck}'`),
"get last release change",
);
// while (openDockSetting) {
// try {
// const result = await prodQuery(
// sqlQuery.query.replace("[dateCheck]", `'${lastCheck}'`),
// "Get release info",
// );
if (result.data.length) {
for (const release of result.data) {
// potentially move this to a buffer table to easy up on memory
await postRelease(release);
// if (result.data.length) {
// for (const release of result.data) {
// // potentially move this to a buffer table to easy up on memory
// await postRelease(release);
// Move checkpoint AFTER successful post
lastCheck = formatInTimeZone(
new Date(release.Upd_Date).toISOString(),
"UTC",
"yyyy-MM-dd HH:mm:ss",
);
// // Move checkpoint AFTER successful post
// lastCheck = formatInTimeZone(
// new Date(release.Upd_Date).toISOString(),
// "UTC",
// "yyyy-MM-dd HH:mm:ss",
// );
await delay(500);
}
}
} catch (e) {
console.error("Monitor error:", e);
}
// await delay(500);
// }
// }
// } catch (e) {
// console.error("Monitor error:", e);
// }
await delay(15 * 1000); // making this 15 seconds as we would really only see issues if we have a mass burst.
}
// await delay(15 * 1000); // making this 15 seconds as we would really only see issues if we have a mass burst.
// }
};
const getToken = async () => {

View File

@@ -0,0 +1,208 @@
-- Datamart "activeArticles" source query.
-- Returns every active article with a coarse material-type bucket, latest
-- sales/purchase price rows, cost-center info, and a parsed purchase UOM.
-- NOTE(review): "test1" in the database name appears to be a placeholder the
-- query runner swaps for the real environment — confirm before running as-is.
use AlplaPROD_test1
SELECT V_Artikel.IdArtikelvarianten,
V_Artikel.Bezeichnung,
V_Artikel.ArtikelvariantenTypBez,
V_Artikel.PreisEinheitBez,
-- default a missing sales price to 0
case when sales.price is null then 0 else sales.price end as salesPrice,
-- Bucket the raw variant-type description into a coarse material category.
TypeOfMaterial=CASE
WHEN
V_Artikel.ArtikelvariantenTypBez LIKE'%Additive'
Then 'AD'
when V_Artikel.ArtikelvariantenTypBez Like '%Masterbatch'
THEN 'MB'
WHEN V_Artikel.ArtikelvariantenTypBez ='Pallet' or
V_Artikel.ArtikelvariantenTypBez ='Top' or
V_Artikel.ArtikelvariantenTypBez ='Bags' or
V_Artikel.ArtikelvariantenTypBez ='Bag' or
V_Artikel.ArtikelvariantenTypBez ='Stretch Wrap' or
V_Artikel.ArtikelvariantenTypBez ='Stretch Film' or
V_Artikel.ArtikelvariantenTypBez ='Banding Materials' or
V_Artikel.ArtikelvariantenTypBez ='Carton' or
V_Artikel.ArtikelvariantenTypBez ='Re-Shipper Box' or
V_Artikel.ArtikelvariantenTypBez ='Label' or
V_Artikel.ArtikelvariantenTypBez ='Pallet Label' or
V_Artikel.ArtikelvariantenTypBez ='Carton Label' or
V_Artikel.ArtikelvariantenTypBez ='Liner' or
V_Artikel.ArtikelvariantenTypBez ='Dose Cup' or
V_Artikel.ArtikelvariantenTypBez ='Metal Cage' or
V_Artikel.ArtikelvariantenTypBez ='Spout' or
V_Artikel.ArtikelvariantenTypBez = 'Slip Sheet' or
V_Artikel.ArtikelvariantenTypBez = 'Palet' or
V_Artikel.ArtikelvariantenTypBez = 'LID' or
V_Artikel.ArtikelvariantenTypBez= 'Metal' or
V_Artikel.ArtikelvariantenTypBez= 'Corner post' or
V_Artikel.ArtikelvariantenTypBez= 'Bottle Label' or
V_Artikel.ArtikelvariantenTypBez = 'Paper label' or
V_Artikel.ArtikelvariantenTypBez = 'Banding' or
V_Artikel.ArtikelvariantenTypBez = 'Glue' or
V_Artikel.ArtikelvariantenTypBez = 'Top Frame' or
V_Artikel.ArtikelvariantenTypBez = 'IML Label' or
V_Artikel.ArtikelvariantenTypBez = 'Purch EBM Bottle' or
V_Artikel.ArtikelvariantenTypBez = 'Purchased Spout' or
V_Artikel.ArtikelvariantenTypBez = 'Gaylord' or
V_Artikel.ArtikelvariantenTypBez = 'Misc. Packaging' or
V_Artikel.ArtikelvariantenTypBez = 'Sleeve' or
V_Artikel.ArtikelvariantenTypBez = 'Plastic Bag' or
V_Artikel.ArtikelvariantenTypBez = 'Purch Spout' or
V_Artikel.ArtikelvariantenTypBez = 'Seal' or
V_Artikel.ArtikelvariantenTypBez = 'Tape' or
V_Artikel.ArtikelvariantenTypBez = 'Box' or
V_Artikel.ArtikelvariantenTypBez = 'Label IML' or
V_Artikel.ArtikelvariantenTypBez = 'Pallet Runner'
THEN 'PKG'
WHEN V_Artikel.ArtikelvariantenTypBez='HD-PE' or
V_Artikel.ArtikelvariantenTypBez='HD-PE PCR' or
V_Artikel.ArtikelvariantenTypBez='HD-PP' or
V_Artikel.ArtikelvariantenTypBez= 'PP' or
V_Artikel.ArtikelvariantenTypBez LIKE '%PCR' or
V_Artikel.ArtikelvariantenTypBez= 'LDPE' or
V_Artikel.ArtikelvariantenTypBez= 'PP' or
V_Artikel.ArtikelvariantenTypBez= 'HDPE' or
V_Artikel.ArtikelvariantenTypBez= 'PET' or
V_Artikel.ArtikelvariantenTypBez= 'PET-P' or
V_Artikel.ArtikelvariantenTypBez= 'PET-G'
THEN 'MM'
WHEN
V_Artikel.ArtikelvariantenTypBez='HDPE-Waste' or
V_Artikel.ArtikelvariantenTypBez='$Waste Container' or
V_Artikel.ArtikelvariantenTypBez='Mixed-Waste' or
V_Artikel.ArtikelvariantenTypBez LIKE'%-Waste%'
THEN 'Waste'
WHEN
V_Artikel.ArtikelvariantenTypBez = 'Bottle' or
V_Artikel.ArtikelvariantenTypBez = 'SBM Bottle' or
V_Artikel.ArtikelvariantenTypBez = 'EBM Bottle' or
V_Artikel.ArtikelvariantenTypBez = 'ISBM Bottle' or
V_Artikel.ArtikelvariantenTypBez = 'Decorated Bottle'
THEN 'Bottle'
WHEN V_Artikel.ArtikelvariantenTypBez = 'Preform'
Then 'Preform'
When
V_Artikel.ArtikelvariantenTypBez = 'Purchased Preform' or
V_Artikel.ArtikelvariantenTypBez = 'Purchased Caps' or
V_Artikel.ArtikelvariantenTypBez = 'Purchased_preform'
THEN 'Purchased_preform'
When
V_Artikel.ArtikelvariantenTypBez = 'Closures' or
V_Artikel.ArtikelvariantenTypBez = 'Cap'
THEN 'Caps'
When
V_Artikel.ArtikelvariantenTypBez = 'Dummy'
THEN 'Not used'
ELSE 'Item not defined' END
,V_Artikel.IdArtikelvariantenTyp,
Round(V_Artikel.ArtikelGewicht, 3) as Article_Weight,
IdAdresse,
AdressBez,
AdressTypBez,
ProdBereichBez,
-- Flag articles from finished-goods production areas.
FG=case when
V_Artikel.ProdBereichBez = 'SBM' or
V_Artikel.ProdBereichBez = 'IM-Caps' or
V_Artikel.ProdBereichBez = 'IM-PET' or
V_Artikel.ProdBereichBez = 'PRINT OFFICE' or
V_Artikel.ProdBereichBez = 'EBM' or
V_Artikel.ProdBereichBez = 'ISBM' or
V_Artikel.ProdBereichBez = 'IM-Finishing'
Then 'FG'
Else 'not Defined Profit Center'
end,
V_Artikel.Umlaeufe as num_of_cycles,
V_FibuKonten_BASIS.FibuKontoNr as CostsCenterId,
V_FibuKonten_BASIS.Bezeichnung as CostCenterDescription,
sales.[KdArtNr] as CustomerArticleNumber,
sales.[KdArtBez] as CustomerArticleDescription,
round(V_Artikel.Zyklus, 2) as CycleTime,
Sypronummer as salesAgreement,
V_Artikel.ProdArtikelBez as ProductFamily
--,REPLACE(pur.UOM,'UOM:','')
-- Take the first whitespace-delimited token of the purchase UOM remark
-- (newlines collapsed first); default to '1' when nothing is parseable.
,Case when LEFT(
LTRIM(REPLACE(pur.UOM,'UOM:','')),
CHARINDEX(' ', LTRIM(REPLACE(REPLACE(pur.UOM,'UOM:',''), CHAR(13)+CHAR(10), ' ')) + ' ') - 1
) is null then '1' else LEFT(
LTRIM(REPLACE(pur.UOM,'UOM:','')),
CHARINDEX(' ', LTRIM(REPLACE(REPLACE(pur.UOM,'UOM:',''), CHAR(13)+CHAR(10), ' ')) + ' ') - 1
) end AS UOM
--,*
FROM dbo.V_Artikel (nolock)
join
dbo.V_Artikelvarianten (nolock) on dbo.V_Artikel.IdArtikelvarianten =
dbo.V_Artikelvarianten.IdArtikelvarianten
join
dbo.V_FibuKonten_BASIS (nolock) on dbo.V_FibuKonten_BASIS.IdFibuKonto =
dbo.V_FibuKonten_BASIS.IdFibuKonto
-- adding in the sales price
-- (latest active standard-customer price row per article variant)
left join
(select * from
(select
ROW_NUMBER() OVER (PARTITION BY IdArtikelvarianten ORDER BY GueltigabDatum DESC) AS RN,
IdArtikelvarianten as av
,GueltigabDatum as validDate
,VKPreis as price
,[KdArtNr]
,[KdArtBez]
--,*
from dbo.T_HistoryVK (nolock)
where
--GueltigabDatum > getDate() - 120
--and
Aktiv = 1
and StandardKunde = 1 -- default address
) a
where RN = 1) as sales
on dbo.V_Artikel.IdArtikelvarianten = sales.av
/* adding the purchase price info */
-- (latest standard-supplier price row; UOM extracted from the remark)
left join
(select * from
(select
ROW_NUMBER() OVER (PARTITION BY IdArtikelvarianten ORDER BY GueltigabDatum DESC) AS RN,
IdArtikelvarianten as av
,GueltigabDatum as validDate
,EKPreis as price
,LiefArtNr as supplierNr
--,CASE
-- WHEN Bemerkung IS NOT NULL AND Bemerkung LIKE '%UOM:%'
-- THEN
-- -- incase there is something funny going on in the remark well jsut check for new lines and what not
-- LEFT(
-- REPLACE(REPLACE(Bemerkung, CHAR(13)+CHAR(10), ' '), CHAR(10), ' '),
-- CASE
-- WHEN CHARINDEX(' ', REPLACE(REPLACE(Bemerkung, CHAR(13)+CHAR(10), ' '), CHAR(10), ' ')) > 0
-- THEN CHARINDEX(' ', REPLACE(REPLACE(Bemerkung, CHAR(13)+CHAR(10), ' '), CHAR(10), ' ')) - 1
-- ELSE LEN(Bemerkung)
-- END
-- )
-- ELSE 'UOM:1'
-- END AS UOM
,CASE
WHEN Bemerkung IS NOT NULL AND Bemerkung LIKE '%UOM:%'
THEN
LTRIM(
SUBSTRING(
Bemerkung,
CHARINDEX('UOM:', UPPER(Bemerkung)) + LEN('UOM:'),
LEN(Bemerkung)
)
)
ELSE
'UOM:1'
END AS UOM
,Bemerkung
--,*
from dbo.T_HistoryEK (nolock)
where
StandardLieferant = 1 -- default address
) a
where RN = 1) as pur
on dbo.V_Artikel.IdArtikelvarianten = pur.av
where V_Artikel.aktiv = 1 --and dbo.V_Artikel.IdArtikelvarianten = 1445
order by V_Artikel.IdArtikelvarianten /*, TypeOfMaterial */

View File

@@ -0,0 +1,74 @@
use [test1_AlplaPROD2.0_Read]
-- Release / delivery report for a date window.
-- [startDate] / [endDate] are template placeholders substituted by the query
-- runner before execution (see inline examples).
-- NOTE(review): all (nolock) hints allow dirty reads; presumably acceptable
-- for reporting — confirm.
DECLARE @StartDate DATE = '[startDate]' -- 2025-1-1
DECLARE @EndDate DATE = '[endDate]' -- 2025-1-31
SELECT
r.[ArticleHumanReadableId]
,[ReleaseNumber]
,h.CustomerOrderNumber
,x.CustomerLineItemNumber
,[CustomerReleaseNumber]
,[ReleaseState]
,[DeliveryState]
,ea.JournalNummer as BOL_Number
,[ReleaseConfirmationState]
,[PlanningState]
--,format(r.[OrderDate], 'yyyy-MM-dd HH:mm') as OrderDate
,r.[OrderDate]
--,FORMAT(r.[DeliveryDate], 'yyyy-MM-dd HH:mm') as DeliveryDate
,r.[DeliveryDate]
--,FORMAT(r.[LoadingDate], 'yyyy-MM-dd HH:mm') as LoadingDate
,r.[LoadingDate]
,[Quantity]
,[DeliveredQuantity]
,r.[AdditionalInformation1]
,r.[AdditionalInformation2]
,[TradeUnits]
,[LoadingUnits]
,[Trucks]
,[LoadingToleranceType]
,[SalesPrice]
,[Currency]
,[QuantityUnit]
,[SalesPriceRemark]
,r.[Remark]
,[Irradiated]
,r.[CreatedByEdi]
,[DeliveryAddressHumanReadableId]
,DeliveryAddressDescription
,[CustomerArtNo]
,[TotalPrice]
,r.[ArticleAlias]
FROM [order].[Release] (nolock) as r
-- release -> line item -> order header, to pull customer order numbers
left join
[order].LineItem as x on
r.LineItemId = x.id
left join
[order].Header as h on
x.HeaderId = h.id
--bol stuff: map the release to its loading plan, then to the delivery journal
left join
AlplaPROD_test1.dbo.V_LadePlanungenLadeAuftragAbruf (nolock) as zz
on zz.AbrufIdAuftragsAbruf = r.ReleaseNumber
left join
-- keep only the most recent delivery row per journal id
-- (inner alias x is scoped to this derived table; it does not clash with
-- the LineItem alias above)
(select * from (SELECT
ROW_NUMBER() OVER (PARTITION BY IdJournal ORDER BY add_date DESC) AS RowNum
,*
FROM [AlplaPROD_test1].[dbo].[T_Lieferungen] (nolock)) x
where RowNum = 1) as ea on
zz.IdLieferschein = ea.IdJournal
where
--r.ArticleHumanReadableId in ([articles])
--r.ReleaseNumber = 1452
r.DeliveryDate between @StartDate AND @EndDate
and DeliveredQuantity > 0
--and Journalnummer = 169386

View File

@@ -0,0 +1,72 @@
-- Incremental pull of reporting Release rows changed since [dateCheck]
-- (template placeholder substituted by the query runner before execution).
SELECT
[Id]
,[ReleaseNumber]
,[CustomerReleaseNumber]
,[ReleaseState]
,[LineItemId]
,[BlanketOrderId]
,[DeliveryState]
,[ReleaseConfirmationState]
,[PlanningState]
,[OrderDate]
-- cast to datetime2 so the consumer gets a consistent datetime type
,cast([DeliveryDate] as datetime2) as DeliveryDate
,[LoadingDate]
,[Quantity]
,[DeliveredQuantity]
,[DeliveredQuantityTradeUnits]
,[DeliveredQuantityLoadingUnits]
,[PackagingId]
,[PackagingHumanReadableId]
,[PackagingDescription]
,[MainMaterialId]
,[MainMaterialHumanReadableId]
,[MainMaterialDescription]
,[AdditionalInformation1]
,[AdditionalInformation2]
,[D365SupplierLot]
,[TradeUnits]
,[LoadingUnits]
,[Trucks]
,[LoadingToleranceType]
,[UnderdeliveryDeviation]
,[OverdeliveryDeviation]
,[ArticleAccountRequirements_ArticleExact]
,[ArticleAccountRequirements_CustomerExact]
,[ArticleAccountRequirements_PackagingExact]
,[ArticleAccountRequirements_MainMaterialExact]
,[PriceLogicType]
,[AllowProductionLotMixing]
,[EnforceStrictPicking]
,[SalesPrice]
,[Currency]
,[QuantityUnit]
,[SalesPriceRemark]
,[DeliveryConditionId]
,[DeliveryConditionHumanReadableId]
,[DeliveryConditionDescription]
,[PaymentTermsId]
,[PaymentTermsHumanReadableId]
,[PaymentTermsDescription]
,[Remark]
,[DeliveryAddressId]
,[DeliveryAddressHumanReadableId]
,[DeliveryAddressDescription]
,[DeliveryStreetName]
,[DeliveryAddressZip]
,[DeliveryCity]
,[DeliveryCountry]
,[ReleaseDiscount]
,[CustomerArtNo]
,[LineItemHumanReadableId]
,[LineItemArticle]
,[LineItemArticleWeight]
,[LineItemQuantityType]
,[TotalPrice]
,[Add_User]
,[Add_Date]
,[Upd_User]
,cast([Upd_Date] as dateTime) as Upd_Date
,[VatRate]
,[ArticleAlias]
FROM [test1_AlplaPROD2.0_Reporting].[reporting_order].[Release] (nolock)
-- NOTE(review): format() on the column makes this non-sargable (no index use)
-- and compares as strings; presumably [dateCheck] is injected as a quoted
-- 'yyyy-MM-dd HH:mm:ss' literal, which does sort correctly lexicographically —
-- confirm, and consider `where [Upd_Date] > @dateCheck` if a typed parameter
-- is available.
where format([Upd_Date], 'yyyy-MM-dd HH:mm:ss') > [dateCheck]

View File

@@ -4,12 +4,12 @@ import { setupAuthRoutes } from "./auth/auth.routes.js";
import { setupApiDocsRoutes } from "./configs/scaler.config.js";
import { setupDatamartRoutes } from "./datamart/datamart.routes.js";
import { setupProdSqlRoutes } from "./prodSql/prodSql.routes.js";
import stats from "./system/stats.route.js";
import { setupSystemRoutes } from "./system/system.routes.js";
import { setupUtilsRoutes } from "./utils/utils.routes.js";
export const setupRoutes = (baseUrl: string, app: Express) => {
app.use(`${baseUrl}/api/stats`, stats);
//routes that are on by default
setupSystemRoutes(baseUrl, app);
setupApiDocsRoutes(baseUrl, app);
setupProdSqlRoutes(baseUrl, app);
setupDatamartRoutes(baseUrl, app);

View File

@@ -1,9 +1,8 @@
import { createServer } from "node:http";
import os from "node:os";
import createApp from "./app.js";
import { startDatamartSync } from "./datamart/datamartSync.controller.js";
import { createLogger } from "./logger/logger.controller.js";
import { monitorReleaseChanges } from "./opendock/utils/releaseMonitor.utils.js";
import { monitorReleaseChanges } from "./opendock/releaseMonitor.utils.js";
import { connectProdSql } from "./prodSql/prodSqlConnection.controller.js";
import { setupSocketIORoutes } from "./socket.io/serverSetup.js";
@@ -14,7 +13,6 @@ const start = async () => {
// triggering long lived processes
connectProdSql();
startDatamartSync(); // TODO: Remove this and all the other data related to it as we dont want this idea anymore
// start long live processes
setTimeout(() => {

View File

@@ -23,6 +23,7 @@ router.get("/", async (_, res) => {
? sqlServerStats?.data[0].UptimeSeconds
: [],
eomFGPkgSheetVersion: 1, // this is the excel file version when we have a change to the macro we want to grab this
masterMacroFile: 1,
});
});

View File

@@ -0,0 +1,9 @@
import type { Express } from "express";
import stats from "./stats.route.js";
export const setupSystemRoutes = (baseUrl: string, app: Express) => {
	// /api/stats keeps its legacy top-level path so existing callers keep working.
	app.use(`${baseUrl}/api/stats`, stats);
	// Any new system route should be mounted under /api/system/* instead.
};

View File

@@ -1,4 +1,7 @@
import { jobAuditLog } from "backend/db/schema/auditLog.schema.js";
import { Cron } from "croner";
import { eq } from "drizzle-orm";
import { db } from "../db/db.controller.js";
import { createLogger } from "../logger/logger.controller.js";
// example createJob
@@ -16,15 +19,22 @@ export interface JobInfo {
// Store running cronjobs
export const runningCrons: Record<string, Cron> = {};
/**
*
* @param name Name of the job we want to run
* @param schedule Cron expression (example: `*\/5 * * * * *`)
* @param task Async function that will run
*/
export const createCronJob = async (
name: string,
schedule: string, // cron string with 8 8 IE: */5 * * * * * every 5th second
task?: () => Promise<void>, // what function are we passing over
task: () => Promise<void>, // what function are we passing over
) => {
// get the timezone based on the os timezone set
const timeZone = Intl.DateTimeFormat().resolvedOptions().timeZone;
const log = createLogger({ module: "system", subModule: "croner" });
// Destroy existing job if it exists
// Destroy existing job if it exist
if (runningCrons[name]) {
runningCrons[name].stop();
}
@@ -37,10 +47,48 @@ export const createCronJob = async (
catch: true, // Prevents unhandled rejections
name: name,
},
task,
);
async () => {
  // Wrap the task so every run is recorded in job_audit_log:
  // insert a "running" row, then flip it to success/error when done.
  const startedAt = new Date();
  const start = Date.now();
  const log = createLogger({ module: "system", subModule: "croner" });
  let executionId = "";
  try {
    const [execution] = await db
      .insert(jobAuditLog)
      .values({
        jobName: name,
        startedAt,
        status: "running",
      })
      .returning();
    executionId = execution?.id ?? "";
    await task?.();
    // Mark this execution (and only this one) as finished.
    await db
      .update(jobAuditLog)
      .set({
        finishedAt: new Date(),
        durationMs: Date.now() - start,
        status: "success",
      })
      .where(eq(jobAuditLog.id, executionId));
  } catch (e) {
    const err = e instanceof Error ? e : new Error(String(e));
    // Don't swallow the failure silently — surface it in the logs.
    log.error({ err }, `Cron job ${name} failed: ${err.message}`);
    if (executionId) {
      await db
        .update(jobAuditLog)
        .set({
          finishedAt: new Date(),
          durationMs: Date.now() - start,
          status: "error",
          errorMessage: err.message,
          errorStack: err.stack,
        })
        // BUG FIX: without this WHERE every row in job_audit_log was
        // stamped "error" whenever any single job failed.
        .where(eq(jobAuditLog.id, executionId));
    }
  }
},
);
log.info({}, `A job for ${name} was just created.`);
};

View File

@@ -0,0 +1,19 @@
import { Router } from "express";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { getAllJobs } from "./croner.utils.js";
const r = Router();

/**
 * GET / — report every cron job currently registered with the scheduler.
 */
r.get("/", async (_, res) =>
  apiReturn(res, {
    success: true,
    level: "info",
    module: "utils",
    subModule: "jobs",
    message: "All current Jobs",
    data: getAllJobs(),
    status: 200,
  }),
);

export default r;

View File

@@ -0,0 +1,63 @@
import { Router } from "express";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { getAllJobs, resumeCronJob, stopCronJob } from "./croner.utils.js";
const r = Router();

/**
 * PATCH /:status — start or stop a named cron job.
 * `:status` must be "start" or "stop"; the request body must carry `{ name }`.
 * Every response echoes the current job list so the caller can refresh state.
 */
r.patch("/:status", async (req, res) => {
  const { status } = req.params;
  const body = req.body;

  // A job name is required to know which cron to act on.
  if (!body.name) {
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "utils",
      subModule: "jobs",
      message: "Missing mandatory name",
      data: getAllJobs(),
      status: 400,
    });
  }

  const statusCheck = ["start", "stop"];
  if (!statusCheck.includes(status)) {
    return apiReturn(res, {
      success: false,
      level: "error",
      module: "utils",
      subModule: "jobs",
      message: "You have passed an invalid option please try again.",
      data: getAllJobs(),
      status: 400,
    });
  }

  if (status === "start") {
    resumeCronJob(body.name);
    return apiReturn(res, {
      success: true,
      level: "info",
      module: "utils",
      subModule: "jobs",
      // BUG FIX: previously interpolated the global `name`, not body.name.
      message: `${body.name} was restarted`,
      data: getAllJobs(),
      status: 200,
    });
  }

  // statusCheck guarantees "stop" is the only remaining possibility.
  stopCronJob(body.name);
  return apiReturn(res, {
    success: true,
    level: "info",
    module: "utils",
    subModule: "jobs",
    message: `${body.name} was stopped`,
    data: getAllJobs(),
    status: 200,
  });
});

export default r;

View File

@@ -0,0 +1,3 @@
/**
 * Pause for the given number of milliseconds.
 * @param ms how long to wait before the returned promise resolves
 */
export const delay = (ms: number) =>
  new Promise((wake) => setTimeout(wake, ms));

View File

@@ -1,16 +1,8 @@
import type { Express } from "express";
import { getAllJobs } from "./croner.utils.js";
import { apiReturn } from "./returnHelper.utils.js";
import getActiveJobs from "./cronnerActiveJobs.route.js";
import jobStatusChange from "./cronnerStatusChange.js";
export const setupUtilsRoutes = (baseUrl: string, app: Express) => {
app.get(`${baseUrl}/api/utils`, (_, res) => {
return apiReturn(res, {
success: true,
level: "info",
module: "utils",
subModule: "jobs",
message: "All current Jobs",
data: getAllJobs(),
status: 200,
});
});
app.use(`${baseUrl}/api/utils/croner`, getActiveJobs);
app.use(`${baseUrl}/api/utils/croner`, jobStatusChange);
};

View File

@@ -7,7 +7,8 @@
"defaultBranch": "main"
},
"files": {
"ignoreUnknown": false
"ignoreUnknown": false,
"includes": ["**", "!!**/dist","!!**/frontend", "!!**/lst_docs"]
},
"formatter": {
"enabled": true,

View File

@@ -0,0 +1,2 @@
CREATE TYPE "public"."setting_type" AS ENUM('feature', 'system', 'standard');--> statement-breakpoint
ALTER TABLE "settings" ADD COLUMN "settingType" "setting_type";

View File

@@ -0,0 +1,11 @@
CREATE TABLE "job_audit_log" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"job_name" text,
"start_at" timestamp,
"finished_at" timestamp,
"duration_ms" integer,
"status" text,
"error_message" text,
"error_stack" text,
"meta_data" jsonb
);

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -71,6 +71,20 @@
"when": 1771343379107,
"tag": "0009_hesitant_nextwave",
"breakpoints": true
},
{
"idx": 10,
"version": "7",
"when": 1771448444754,
"tag": "0010_handy_ironclad",
"breakpoints": true
},
{
"idx": 11,
"version": "7",
"when": 1771515240318,
"tag": "0011_eminent_iron_patriot",
"breakpoints": true
}
]
}