feat(purchase history): purchase history changed to long-running, no notification
This commit is contained in:
@@ -15,14 +15,14 @@ export const alplaPurchaseHistory = pgTable("alpla_purchase_history", {
|
||||
revision: integer("revision"),
|
||||
confirmed: integer("confirmed"),
|
||||
status: integer("status"),
|
||||
statusText: integer("status_text"),
|
||||
statusText: text("status_text"),
|
||||
journalNum: integer("journal_num"),
|
||||
add_date: timestamp("add_date").defaultNow(),
|
||||
upd_date: timestamp("upd_date").defaultNow(),
|
||||
add_user: text("add_user"),
|
||||
upd_user: text("upd_user"),
|
||||
upd_date: timestamp("upd_date").defaultNow(),
|
||||
remark: text("remark"),
|
||||
approvedStatus: text("approved_status"),
|
||||
approvedStatus: text("approved_status").default("pending"),
|
||||
position: jsonb("position").default([]),
|
||||
createdAt: timestamp("created_at").defaultNow(),
|
||||
});
|
||||
|
||||
@@ -15,16 +15,18 @@ IdBestellung as apo
|
||||
when 1 then 'Created'
|
||||
when 2 then 'Ordered'
|
||||
when 22 then 'Reopened'
|
||||
when 11 then 'Reopened'
|
||||
when 4 then 'Planned'
|
||||
when 5 then 'Partly Delivered'
|
||||
when 6 then 'Delivered'
|
||||
when 7 then 'Canceled'
|
||||
when 8 then 'Closed'
|
||||
else 'Unknown' end as statusText
|
||||
,po.Add_User
|
||||
,po.Add_Date
|
||||
,po.Upd_User
|
||||
,po.Upd_Date
|
||||
,po.IdJournal as journalNum -- use this to validate if we used it already.
|
||||
,po.Add_User as add_user
|
||||
,po.Add_Date as add_date
|
||||
,po.Upd_User as upd_user
|
||||
,po.Upd_Date as upd_Date
|
||||
,po.Bemerkung as remark
|
||||
,po.IdJournal as journal -- use this to validate if we used it already.
|
||||
,isnull((
|
||||
@@ -55,7 +57,7 @@ left join
|
||||
a.IdArtikelvarianten = o.IdArtikelVarianten
|
||||
where o.IdBestellung = po.IdBestellung
|
||||
for json path
|
||||
), '[]') as postion
|
||||
), '[]') as position
|
||||
--,*
|
||||
from T_Bestellungen (nolock) as po
|
||||
where po.Upd_Date > dateadd(MINUTE, -@intervalCheck, getdate())
|
||||
@@ -4,6 +4,10 @@
|
||||
|
||||
import { eq } from "drizzle-orm";
|
||||
import { db } from "../db/db.controller.js";
|
||||
import {
|
||||
alplaPurchaseHistory,
|
||||
type NewAlplaPurchaseHistory,
|
||||
} from "../db/schema/alplapurchase.schema.js";
|
||||
import { settings } from "../db/schema/settings.schema.js";
|
||||
import { createLogger } from "../logger/logger.controller.js";
|
||||
import { prodQuery } from "../prodSql/prodSqlQuery.controller.js";
|
||||
@@ -14,6 +18,7 @@ import {
|
||||
import { createCronJob } from "../utils/croner.utils.js";
|
||||
import { delay } from "../utils/delay.utils.js";
|
||||
import { returnFunc } from "../utils/returnHelper.utils.js";
|
||||
import { tryCatch } from "../utils/trycatch.utils.js";
|
||||
|
||||
const log = createLogger({ module: "purchase", subModule: "purchaseMonitor" });
|
||||
|
||||
@@ -38,17 +43,44 @@ export const monitorAlplaPurchase = async () => {
|
||||
}
|
||||
|
||||
if (purchaseMonitor[0]?.active) {
|
||||
createCronJob("opendock_sync", "* */5 * * * *", async () => {
|
||||
createCronJob("purchaseMonitor", "0 */5 * * * *", async () => {
|
||||
try {
|
||||
const result = await prodQuery(
|
||||
sqlQuery.query.replace(
|
||||
"[interval]",
|
||||
`'${purchaseMonitor[0]?.value || "5"}'`,
|
||||
`${purchaseMonitor[0]?.value || "5"}`,
|
||||
),
|
||||
"Get release info",
|
||||
);
|
||||
|
||||
log.debug(
|
||||
{},
|
||||
`There are ${result.data.length} pending to be updated from the last ${purchaseMonitor[0]?.value}`,
|
||||
);
|
||||
if (result.data.length) {
|
||||
const convertedData = result.data.map((i) => ({
|
||||
...i,
|
||||
position: JSON.parse(i.position),
|
||||
})) as NewAlplaPurchaseHistory;
|
||||
|
||||
const { data, error } = await tryCatch(
|
||||
db.insert(alplaPurchaseHistory).values(convertedData).returning(),
|
||||
);
|
||||
|
||||
if (data) {
|
||||
log.debug(
|
||||
{ data },
|
||||
"New data was just added to alpla purchase history",
|
||||
);
|
||||
}
|
||||
|
||||
if (error) {
|
||||
log.error(
|
||||
{ error },
|
||||
"There was an error adding alpla purchase history",
|
||||
);
|
||||
}
|
||||
|
||||
await delay(500);
|
||||
}
|
||||
} catch (e) {
|
||||
@@ -57,6 +89,8 @@ export const monitorAlplaPurchase = async () => {
|
||||
"Error occurred while running the monitor job",
|
||||
);
|
||||
log.error({ error: e }, "Error occurred while running the monitor job");
|
||||
|
||||
return;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -8,7 +8,7 @@ const newSettings: NewSetting[] = [
|
||||
// feature settings
|
||||
{
|
||||
name: "opendock_sync",
|
||||
value: "0",
|
||||
value: "15",
|
||||
active: false,
|
||||
description: "Dock Scheduling system",
|
||||
moduleName: "opendock",
|
||||
@@ -69,7 +69,7 @@ const newSettings: NewSetting[] = [
|
||||
{
|
||||
name: "purchaseMonitor",
|
||||
value: "5",
|
||||
active: false,
|
||||
active: true,
|
||||
description: "Monitors alpla purchase fo all changes",
|
||||
moduleName: "purchase",
|
||||
settingType: "feature",
|
||||
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
killOpendockSocket,
|
||||
opendockSocketMonitor,
|
||||
} from "../opendock/opendockSocketMonitor.utils.js";
|
||||
import { monitorAlplaPurchase } from "../purchase/purchase.controller.js";
|
||||
import {
|
||||
createCronJob,
|
||||
resumeCronJob,
|
||||
@@ -31,8 +32,24 @@ export const featureControl = async (data: Setting) => {
|
||||
createCronJob("opendockAptCleanup", "0 30 5 * * *", () =>
|
||||
dbCleanup("opendockApt", 90),
|
||||
);
|
||||
} else {
|
||||
}
|
||||
|
||||
if (data.name === "opendock_sync" && !data.active) {
|
||||
killOpendockSocket();
|
||||
stopCronJob("opendockAptCleanup");
|
||||
}
|
||||
|
||||
// purchase stuff
|
||||
if (data.name === "purchaseMonitor" && data.active) {
|
||||
monitorAlplaPurchase();
|
||||
}
|
||||
|
||||
if (data.name === "purchaseMonitor" && !data.active) {
|
||||
stopCronJob("purchaseMonitor");
|
||||
}
|
||||
|
||||
// this means the data time has changed
|
||||
if (data.name === "purchaseMonitor" && data.value) {
|
||||
monitorAlplaPurchase();
|
||||
}
|
||||
};
|
||||
|
||||
@@ -18,7 +18,9 @@ export interface JobInfo {
|
||||
|
||||
// Store running cronjobs
|
||||
export const runningCrons: Record<string, Cron> = {};
|
||||
const activeRuns = new Set<string>();
|
||||
const log = createLogger({ module: "system", subModule: "croner" });
|
||||
const cronStats: Record<string, { created: number; replaced: number }> = {};
|
||||
|
||||
// how to set the times
|
||||
// * ┌──────────────── (optional) second (0 - 59)
|
||||
@@ -38,17 +40,36 @@ const log = createLogger({ module: "system", subModule: "croner" });
|
||||
* @param name Name of the job we want to run
|
||||
* @param schedule Cron expression (example: `*\/5 * * * * *`)
|
||||
* @param task Async function that will run
|
||||
* @param source we can add where it came from to assist in getting this tracked down, more for debugging
|
||||
*/
|
||||
export const createCronJob = async (
|
||||
name: string,
|
||||
schedule: string, // cron string with 8 8 IE: */5 * * * * * every 5th second
|
||||
task: () => Promise<void>, // what function are we passing over
|
||||
source = "unknown",
|
||||
) => {
|
||||
// get the timezone based on the os timezone set
|
||||
const timeZone = Intl.DateTimeFormat().resolvedOptions().timeZone;
|
||||
|
||||
// initial go so just store it this is more for debugging to see if something crazy keeps happening
|
||||
if (!cronStats[name]) {
|
||||
cronStats[name] = { created: 0, replaced: 0 };
|
||||
}
|
||||
|
||||
// Destroy existing job if it exists
|
||||
if (runningCrons[name]) {
|
||||
cronStats[name].replaced += 1;
|
||||
log.warn(
|
||||
{
|
||||
job: name,
|
||||
source,
|
||||
oldSchedule: runningCrons[name].getPattern?.(),
|
||||
newSchedule: schedule,
|
||||
replaceCount: cronStats[name].replaced,
|
||||
},
|
||||
`Cron job "${name}" already existed and is being replaced`,
|
||||
);
|
||||
|
||||
runningCrons[name].stop();
|
||||
}
|
||||
|
||||
@@ -61,6 +82,13 @@ export const createCronJob = async (
|
||||
name: name,
|
||||
},
|
||||
async () => {
|
||||
if (activeRuns.has(name)) {
|
||||
log.warn({ jobName: name }, "Skipping overlapping cron execution");
|
||||
return;
|
||||
}
|
||||
|
||||
activeRuns.add(name);
|
||||
|
||||
const startedAt = new Date();
|
||||
const start = Date.now();
|
||||
|
||||
@@ -91,14 +119,19 @@ export const createCronJob = async (
|
||||
.where(eq(jobAuditLog.id, executionId));
|
||||
} catch (e: any) {
|
||||
if (executionId) {
|
||||
await db.update(jobAuditLog).set({
|
||||
finishedAt: new Date(),
|
||||
durationMs: Date.now() - start,
|
||||
status: "error",
|
||||
errorMessage: e.message,
|
||||
errorStack: e.stack,
|
||||
});
|
||||
await db
|
||||
.update(jobAuditLog)
|
||||
.set({
|
||||
finishedAt: new Date(),
|
||||
durationMs: Date.now() - start,
|
||||
status: "error",
|
||||
errorMessage: e.message,
|
||||
errorStack: e.stack,
|
||||
})
|
||||
.where(eq(jobAuditLog.id, executionId));
|
||||
}
|
||||
} finally {
|
||||
activeRuns.delete(name);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
2
migrations/0021_slimy_master_mold.sql
Normal file
2
migrations/0021_slimy_master_mold.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
ALTER TABLE "alpla_purchase_history" ADD COLUMN "journal_num" integer;--> statement-breakpoint
|
||||
ALTER TABLE "alpla_purchase_history" ADD COLUMN "approved_status" text;
|
||||
1
migrations/0022_large_sumo.sql
Normal file
1
migrations/0022_large_sumo.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "alpla_purchase_history" ALTER COLUMN "approved_status" SET DEFAULT 'pending';
|
||||
1
migrations/0023_normal_hellion.sql
Normal file
1
migrations/0023_normal_hellion.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "alpla_purchase_history" ALTER COLUMN "status_text" SET DATA TYPE text;
|
||||
6
migrations/0024_absent_barracuda.sql
Normal file
6
migrations/0024_absent_barracuda.sql
Normal file
@@ -0,0 +1,6 @@
|
||||
ALTER TABLE "opendock_apt" ALTER COLUMN "release" SET NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "opendock_apt" ALTER COLUMN "appointment" SET NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "opendock_apt" ALTER COLUMN "upd_date" SET NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "opendock_apt" ALTER COLUMN "created_at" SET NOT NULL;--> statement-breakpoint
|
||||
CREATE INDEX "opendock_apt_release_idx" ON "opendock_apt" USING btree ("release");--> statement-breakpoint
|
||||
CREATE INDEX "opendock_apt_opendock_id_idx" ON "opendock_apt" USING btree ("open_dock_apt_id");
|
||||
1435
migrations/meta/0021_snapshot.json
Normal file
1435
migrations/meta/0021_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1436
migrations/meta/0022_snapshot.json
Normal file
1436
migrations/meta/0022_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1436
migrations/meta/0023_snapshot.json
Normal file
1436
migrations/meta/0023_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1467
migrations/meta/0024_snapshot.json
Normal file
1467
migrations/meta/0024_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -148,6 +148,34 @@
|
||||
"when": 1775566910220,
|
||||
"tag": "0020_stale_ma_gnuci",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 21,
|
||||
"version": "7",
|
||||
"when": 1775647109925,
|
||||
"tag": "0021_slimy_master_mold",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 22,
|
||||
"version": "7",
|
||||
"when": 1775649219780,
|
||||
"tag": "0022_large_sumo",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 23,
|
||||
"version": "7",
|
||||
"when": 1775650901523,
|
||||
"tag": "0023_normal_hellion",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 24,
|
||||
"version": "7",
|
||||
"when": 1775661516749,
|
||||
"tag": "0024_absent_barracuda",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
Reference in New Issue
Block a user