/**
 * Client mode: periodically poll SERVER_NAME for new/updated queries via a
 * cron job — by default we check twice a day. A route will also allow the
 * check to be triggered manually, for cases where a query is written for one
 * plant but should eventually roll out to all plants. In client mode we
 * cannot add, update, delete, or push updates.
 *
 * Server mode: provides all queries. When pushing to another server we allow
 * pushing to all servers, or to a single server identified by its plant
 * token. New queries may be added, and existing queries updated by id.
 *
 * Table columns:
 *   id, name, description, query, version, active,
 *   options (string, e.g. "start,end"),
 *   add_date, add_user, upd_date, upd_user
 *
 * Local/dev mode (someone running the server on their own computer via
 * localhost): we allow pushing to the main server, which SERVER_NAME in the
 * env should point to — that is how we know whether we are in production.
 * NODE_ENV must also be set to non-production in order to push to the main
 * server. Everything else behaves like server mode, except that pushes go
 * only to the main server.
*/ import axios from "axios"; import { count, sql } from "drizzle-orm"; import { db } from "../db/db.controller.js"; import { datamart } from "../db/schema/datamart.schema.js"; import { createLogger } from "../logger/logger.controller.js"; import { createCronJob } from "../utils/croner.utils.js"; import { tryCatch } from "../utils/trycatch.utils.js"; // doing the client stuff first // ┌──────────────── (optional) second (0 - 59) // │ ┌────────────── minute (0 - 59) // │ │ ┌──────────── hour (0 - 23) // │ │ │ ┌────────── day of month (1 - 31) // │ │ │ │ ┌──────── month (1 - 12, JAN-DEC) // │ │ │ │ │ ┌────── day of week (0 - 6, SUN-Mon) // │ │ │ │ │ │ (0 to 6 are Sunday to Saturday; 7 is Sunday, the same as 0) // │ │ │ │ │ │ // * * * * * * export const startDatamartSync = async () => { // setup cronner let cronTime = "*/5 * * * *"; if (process.env.QUERY_TIME_TYPE === "m") { // will run this cron ever x cronTime = `*/${process.env.QUERY_CHECK} * * * *`; } if (process.env.QUERY_TIME_TYPE === "h") { // will run this cron ever x cronTime = `* */${process.env.QUERY_CHECK} * * *`; } // if we are in client mode and in production we run the test to see whats new in the last x if ( process.env.NODE_ENV?.trim() === "production" && process.env.APP_RUNNING_IN?.trim() === "client" ) { createCronJob("dataMartSync", cronTime, async () => { const log = createLogger({ module: "system", subModule: "croner" }); const syncTimeToCheck: number = parseInt( process.env.QUERY_CHECK?.trim() || "5", 10, ); let url = `http://${process.env.SERVER_NAME?.trim()}:3000/lst/api/datamart/sync?time=${syncTimeToCheck}`; // validate how many querys we have const qCount = await db.select({ count: count() }).from(datamart); // if we dont have any queries change to a crazy amount of time console.info(qCount[0]?.count); if ((qCount[0]?.count || 0) < 0) { url = `http://${process.env.SERVER_NAME?.trim()}:3000/lst/api/datamart/sync`; } const { data, error } = await tryCatch(axios.get(url)); if (error !== null) 
{ log.error( { error: error.message }, `There was an error getting the new queries.`, ); return; } //what will we do with the new data passed over log.info({ data: data.data }, `There are to be updated`); const queries = data.data.data; if (queries.length === 0) return; const { data: updateQ, error: UpdateQError } = await tryCatch( db .insert(datamart) .values(queries) .onConflictDoUpdate({ target: datamart.id, set: { name: sql.raw(`excluded.${datamart.name}`), description: sql.raw(`excluded.${datamart.description}`), query: sql.raw(`excluded.${datamart.query}`), version: sql.raw(`excluded.${datamart.version}`), active: sql.raw(`excluded.${datamart.active}`), options: sql.raw(`excluded.${datamart.options}`), public: sql.raw(`excluded.${datamart.public}`), }, }), ); if (UpdateQError !== null) { log.error( { error: UpdateQError }, "There was an error add/updating the queries", ); } if (updateQ) { log.info({}, "New and updated queries have been added"); } }); } };