2 Commits

Author SHA1 Message Date
ea72fd10cd feat(datamart): initial foundation of the datamart setup
this will allow for faster datamart additions and updates
2025-12-23 19:30:34 -06:00
1b200147b7 test(docker): testing on docker stuff 2025-12-23 19:29:17 -06:00
10 changed files with 179 additions and 36 deletions

View File

@@ -52,6 +52,7 @@
"alpla",
"alplamart",
"alplaprod",
"Datamart",
"intiallally",
"ppoo",
"prodlabels"

View File

@@ -1,42 +1,23 @@
# ---- deps stage: install all dependencies once, cached on package.json ----
FROM node:24-alpine AS deps
WORKDIR /app
COPY package.json ./
RUN ls -la /app
#RUN mkdir frontend
#RUN mkdir lstDocs
#RUN mkdir controller
#COPY frontend/package*.json ./frontend
#COPY lstDocs/package*.json ./lstDocs
#COPY controller/index.html ./controller
# NOTE: a stray `FROM node:24.12-alpine` previously sat here, which started a
# brand-new stage and ran `npm install` without package.json. Removed so the
# install runs inside the deps stage where package.json was copied.
RUN npm install
#RUN npm run install:front
#RUN npm run install:docs
# ---- builder stage: build the app ----
FROM node:24-alpine AS builder
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
#COPY --from=deps /app/frontend/node_modules ./frontend/node_modules
#COPY --from=deps /app/lstDocs/node_modules ./lstDocs/node_modules
#COPY --from=deps /app/controller/index.html ./controller/index.html
#COPY . ./
RUN npm run build:app
#RUN npm run build:front
#RUN npm run build:docs
# ---- final stage: production runtime image ----
FROM node:24-alpine
WORKDIR /app
# Copy package files
COPY package*.json ./
# Install production dependencies only (--omit=dev matches the stated intent;
# a plain `npm ci` would pull devDependencies into the runtime image)
RUN npm ci --omit=dev
# The build already happened in the builder stage; `RUN npm build:app` was an
# invalid npm subcommand (missing `run`) and redundant here, so it is removed.
# Copy built app from builder stage
COPY --from=builder /app/dist ./dist
#COPY --from=builder /app/frontend/dist ./frontend/dist
#COPY --from=builder /app/lstDocs/build ./lstDocs/build
#COPY --from=deps /app/controller/index.html ./controller/index.html
ENV NODE_ENV=production
ENV RUNNING_IN_DOCKER=true
# Environment variables with defaults
ENV PORT=3000
# SECURITY: these are placeholder defaults baked into the image — always
# override DB_USER/DB_PASSWORD at deploy time (compose/secrets), never ship
# real credentials in the Dockerfile.
ENV DB_USER=admin
ENV DB_PASSWORD=changeme
EXPOSE 3000
CMD ["node", "dist/index.js"]

42
Dockerfile-bak Normal file
View File

@@ -0,0 +1,42 @@
# Backup of the original multi-stage build (kept as-is for reference).
# ---- deps stage: install dependencies once, cached on package.json ----
FROM node:24-alpine AS deps
WORKDIR /app
COPY package.json ./
RUN ls -la /app
#RUN mkdir frontend
#RUN mkdir lstDocs
#RUN mkdir controller
#COPY frontend/package*.json ./frontend
#COPY lstDocs/package*.json ./lstDocs
#COPY controller/index.html ./controller
RUN npm install
#RUN npm run install:front
#RUN npm run install:docs
# Build the Next.js app
# ---- builder stage: reuse deps' node_modules and run the build ----
FROM node:24-alpine AS builder
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
#COPY --from=deps /app/frontend/node_modules ./frontend/node_modules
#COPY --from=deps /app/lstDocs/node_modules ./lstDocs/node_modules
#COPY --from=deps /app/controller/index.html ./controller/index.html
#COPY . ./
# NOTE(review): source copy above is commented out — build:app presumably
# relies on build context defaults; verify the build actually has sources.
RUN npm run build:app
#RUN npm run build:front
#RUN npm run build:docs
# Final stage
# ---- runtime stage: node_modules + built dist only ----
FROM node:24-alpine
WORKDIR /app
COPY --from=builder /app/node_modules ./node_modules
COPY --from=builder /app/dist ./dist
#COPY --from=builder /app/frontend/dist ./frontend/dist
#COPY --from=builder /app/lstDocs/build ./lstDocs/build
#COPY --from=deps /app/controller/index.html ./controller/index.html
ENV NODE_ENV=production
ENV RUNNING_IN_DOCKER=true
ENV PORT=3000
EXPOSE 3000
CMD ["node", "dist/index.js"]

View File

@@ -0,0 +1,51 @@
/**
 * each endpoint will be something like
 * /api/datamart/{name}?{options}
 *
 * when getting the current queries we will need to map through the available queries we currently have and send back.
 * example
 *{
 * "name": "getopenorders",
 * "endpoint": "/api/datamart/getopenorders",
 * "description": "Returns open orders based on day count sent over, sDay 15 days in the past eDay 5 days in the future, can be left empty for these default days",
 * "criteria": "sDay,eDay"
 * },
 *
 * when a criteria is passed over we will handle it by counting how many were passed up to 3 then deal with each one respectively
 */
import { returnFunc } from "../utils/returnHelper.utils.js";

/** Input for a datamart query run. */
type Data = {
  // name of the stored query to look up
  name: string;
  // URL-encoded key/value pairs, e.g. "sDay=15&eDay=5"
  criteria: string;
};

/**
 * Resolves a named datamart query and substitutes its `[placeholder]`
 * tokens with values parsed from the criteria query string, then wraps
 * the resulting SQL text in the standard return envelope.
 */
export const runDatamartQuery = async (data: Data) => {
  // TODO: search the query db for the query by name (dummy placeholder for now)
  const dummyquery = {
    name: "something",
    query: "select * from tableA where start=[start] and end=[end]",
  };
  // start from the unmodified query so it is returned as-is when no criteria
  let datamartQuery = dummyquery.query;
  // parse the criteria string and substitute each [key] placeholder
  if (data.criteria) {
    const params = new URLSearchParams(data.criteria);
    for (const [key, value] of params.entries()) {
      // SECURITY: values arrive from untrusted HTTP query strings and are
      // spliced into SQL text. Escape single quotes as a stopgap; switch to
      // parameterized queries once the real DB layer lands.
      const safeValue = value.replaceAll("'", "''");
      datamartQuery = datamartQuery.replaceAll(`[${key}]`, safeValue);
    }
  }
  return returnFunc({
    success: true,
    level: "info",
    module: "datamart",
    subModule: "query",
    message: `Data for: ${data.name}`,
    data: [{ data: datamartQuery }],
    notify: false,
  });
};

View File

@@ -0,0 +1,13 @@
import type { Express } from "express";
import runQuery from "./getDatamart.route.js";

/**
 * Mounts the datamart router under `${baseUrl}/api/datamart` plus a
 * scoped catch-all for unknown datamart paths.
 */
export const setupDatamartRoutes = (baseUrl: string, app: Express) => {
  //setup all the routes
  app.use(`${baseUrl}/api/datamart`, runQuery);
  // BUGFIX: the previous bare `app.all("*foo", ...)` matched EVERY path, so
  // any route registered after this setup (e.g. /api/stats) was swallowed
  // with a 400. Scope the catch-all to the datamart prefix, and answer with
  // 404 — the correct status for a route that does not exist.
  app.all(`${baseUrl}/api/datamart/*foo`, (_, res) => {
    res.status(404).json({
      message: "You have encountered a datamart route that does not exist",
    });
  });
};

View File

@@ -0,0 +1,22 @@
/**
 * If we are running in client mode we want to periodically check the SERVER_NAME for new/updated queries
 * this will be on a cron job, we will check 2 times a day for new data, we will also have a route we can trigger to check this manually in case we have
 * queries we make for one plant but will eventually go to all plants.
* in client mode we will not be able to add, update, or delete
*
* if we are running on server mode we will provide all queries.
* allow for new queries to be added
* allow for queries to be updated by id
* table will be
* id
* name
* description
* query
* version
* active
* options (string ie start,end)
* add_date
* add_user
* upd_date
* upd_user
*/

View File

@@ -0,0 +1,25 @@
import { Router } from "express";
import { apiReturn } from "../utils/returnHelper.utils.js";
import { runDatamartQuery } from "./datamart.controller.js";

const r = Router();

/**
 * GET /:name — runs the named datamart query; the remaining query-string
 * parameters are serialized and forwarded as substitution criteria.
 */
r.get("/:name", async (req, res) => {
  const { name } = req.params;
  // flatten req.query back into a URL-encoded string for the controller
  const criteria = new URLSearchParams(
    req.query as Record<string, string>,
  ).toString();
  const dataRan = await runDatamartQuery({ name, criteria });
  // BUGFIX: previously always reported status 200 / level "info" even when
  // the controller signalled failure — mirror dataRan.success instead.
  apiReturn(res, {
    success: dataRan.success,
    level: dataRan.success ? "info" : "error",
    module: "datamart",
    subModule: "query",
    message: dataRan.message,
    data: dataRan.data,
    status: dataRan.success ? 200 : 500,
  });
});

export default r;

View File

@@ -0,0 +1,6 @@
/**
* while in client mode we will be connected directly to the postgres and do a nightly backup.
 * we will only keep relevant tables, like silo data, inv history, manualPrinting, notifications, printerData, prodlabels, quality request, rfid tags, roles, serverData, ...etc
* keeping only the last 7 backups
*
*/

View File

@@ -2,6 +2,7 @@ import type { Express } from "express";
// import the routes and route setups
import { setupApiDocsRoutes } from "./configs/scaler.config.js";
import { setupDatamartRoutes } from "./datamart/datamart.routes.js";
import { setupProdSqlRoutes } from "./prodSql/prodSql.routes.js";
import stats from "./system/stats.route.js";
@@ -9,6 +10,7 @@ export const setupRoutes = (baseUrl: string, app: Express) => {
//setup all the routes
setupApiDocsRoutes(baseUrl, app);
setupProdSqlRoutes(baseUrl, app);
setupDatamartRoutes(baseUrl, app);
app.use(`${baseUrl}/api/stats`, stats);
};

View File

@@ -3,8 +3,8 @@ import { createLogger } from "../logger/logger.controller.js";
interface Data {
success: boolean;
module: "system" | "ocp" | "routes";
subModule: "db" | "labeling" | "printer" | "prodSql";
module: "system" | "ocp" | "routes" | "datamart";
subModule: "db" | "labeling" | "printer" | "prodSql" | "query";
level: "info" | "error" | "debug" | "fatal";
message: string;
data?: unknown[];