Compare commits
53 Commits
7539b1653d
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| c7af1901aa | |||
| 2473bfa702 | |||
| 4dd842b3b8 | |||
| 89ef04cc6f | |||
| 3cec883356 | |||
| 0ecbe29ec1 | |||
| 188331c1ad | |||
| 486e4fb6b8 | |||
| c775bb3354 | |||
| b6968b7b67 | |||
| a0aa75c5a0 | |||
| 78be07c8bb | |||
| 0575a34422 | |||
| 3bc3801ffb | |||
| 4368111311 | |||
| daf9e8a966 | |||
| 8a08d3eac6 | |||
| a761a3634b | |||
| a1a30cffd1 | |||
| 6a631be909 | |||
| 75c17d2065 | |||
| 63c053b38c | |||
| 5bcbdaf3d0 | |||
| 074032f20d | |||
| 13e282e815 | |||
| 6c8ac33be7 | |||
| 92ce51eb7c | |||
| 52ef39fd5c | |||
| 623e19f028 | |||
| 14dd87e335 | |||
| 52956ecaa4 | |||
| cc51807819 | |||
| 8e039037a9 | |||
| ed40f4c77e | |||
| ccddef8ba8 | |||
| 05e31f4b9e | |||
| d2578b8850 | |||
| 392a9ef407 | |||
| 2d2337257f | |||
| 5a95bf3ef0 | |||
| 7e9401d8bb | |||
| 0c4465c91a | |||
| 5f9d49561a | |||
| 253d998b68 | |||
| 9acfd1ccd0 | |||
| 39d23f4a8a | |||
| f41a1b3363 | |||
| 1e649d1f23 | |||
| 5cc1fbe919 | |||
| 528d7af031 | |||
| f0bcea0405 | |||
| 67b5976176 | |||
| 4ef2d90aa8 |
36
.env-example
36
.env-example
@@ -1,11 +1,35 @@
|
||||
# uncomment this out to run in productions
|
||||
# APP_ENV=production
|
||||
|
||||
# Server port that will allow vite to talk to the backend.
|
||||
VITE_SERVER_PORT=4000
|
||||
|
||||
# lstv2 loc
|
||||
LSTV2="C\drive\loc"
|
||||
|
||||
# discord - this us used to monitor the logs and make sure we never have a critial shut down.
|
||||
# this will be for other critical stuff like nice label and some other events to make sure we are still in a good spot and dont need to jump in
|
||||
WEBHOOK=
|
||||
|
||||
# dev stuff below
|
||||
|
||||
# Gitea Info
|
||||
GITEA_URL=git.tuffraid.net
|
||||
GITEA_USERNAME=cowch
|
||||
GITEA_URL=git repo
|
||||
GITEA_USERNAME=username
|
||||
GITEA_REPO=logistics_support_tool
|
||||
GITEA_TOKEN=ad8eac91a01e3a1885a1dc10
|
||||
|
||||
# Build number info
|
||||
BUILD_NAME=rushjnnhj7212n
|
||||
# postgres db
|
||||
DB_HOST=localhost
|
||||
DB_PORT=5433
|
||||
DB_USER=username
|
||||
DB_PASSWORD=password
|
||||
DB_NAME=lst # db must be created before you start the app
|
||||
|
||||
# lstv2 loc
|
||||
LSTV2=C\drive\loc
|
||||
# dev locs
|
||||
DEV_FOLDER=C\drive\loc
|
||||
ADMUSER=username
|
||||
ADMPASSWORD=password
|
||||
|
||||
# Build number info
|
||||
BUILD_NAME=leBlfRaj
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -9,6 +9,8 @@ LstWrapper/publish
|
||||
LstWrapper/obj
|
||||
scripts/tmp
|
||||
backend/docs
|
||||
backend/frontend
|
||||
testFolder
|
||||
|
||||
# ---> Go
|
||||
# If you prefer the allow list template instead of the deny list, see community template:
|
||||
@@ -190,4 +192,6 @@ backend/go.sum
|
||||
BUILD_NUMBER
|
||||
scripts/resetDanger.js
|
||||
LstWrapper/Program_vite_as_Static.txt
|
||||
LstWrapper/Program_proxy_backend.txt
|
||||
scripts/stopPool.go
|
||||
backend_bad_practice
|
||||
11
.vscode/settings.json
vendored
11
.vscode/settings.json
vendored
@@ -24,5 +24,12 @@
|
||||
},
|
||||
"[handlebars]": {
|
||||
"editor.formatOnSave": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"[go]": {
|
||||
"editor.formatOnSave": true,
|
||||
"editor.defaultFormatter": "golang.go"
|
||||
},
|
||||
|
||||
// Optional: Configure goimports instead of gofmt
|
||||
"go.formatTool": "goimports"
|
||||
}
|
||||
|
||||
99
CHANGELOG.md
99
CHANGELOG.md
@@ -3,6 +3,105 @@
|
||||
All notable changes to LST will be documented in this file.
|
||||
|
||||
|
||||
## [0.0.1-alpha.6](https://git.tuffraid.net/cowch/logistics_support_tool/compare/v0.0.1-alpha.5...v0.0.1-alpha.6) (2025-07-31)
|
||||
|
||||
### 🌟 Enhancements
|
||||
|
||||
* **logging:** added in db and logging with websocket ([52ef39f](https://git.tuffraid.net/cowch/logistics_support_tool/commit/52ef39fd5c129ed02ed9f38dbf7e49ae06807ad6))
|
||||
* **settings:** migrated all settings endpoints confirmed as well for updates ([0575a34](https://git.tuffraid.net/cowch/logistics_support_tool/commit/0575a344229ba0ff5c0f47781c6d596e5c08e5eb))
|
||||
* **ws server:** added in a websocket on port system to help with better logging ([5bcbdaf](https://git.tuffraid.net/cowch/logistics_support_tool/commit/5bcbdaf3d0e889729d4dce3df51f4330d7793868))
|
||||
|
||||
### 🐛 Bug fixes
|
||||
|
||||
* **update server:** fixed to make sure everything is stopped before doing the remaining update ([13e282e](https://git.tuffraid.net/cowch/logistics_support_tool/commit/13e282e815c1c95a0a5298ede2f6497cdf036440))
|
||||
* **websocket:** errors in saving client info during ping ping ([4368111](https://git.tuffraid.net/cowch/logistics_support_tool/commit/4368111311c48e73a11a6b24febdcc3be31a2a59))
|
||||
* **wrapper:** corrections to properly handle websockets :D ([a761a36](https://git.tuffraid.net/cowch/logistics_support_tool/commit/a761a3634b6cb0aeeb571dd634bd158cee530779))
|
||||
|
||||
### 📚 Documentation
|
||||
|
||||
* **.env example:** added postrgres example ([14dd87e](https://git.tuffraid.net/cowch/logistics_support_tool/commit/14dd87e335a63d76d64c07a15cf593cb286a9833))
|
||||
* **dockerbuild:** comments as a reminder for my seld ([52956ec](https://git.tuffraid.net/cowch/logistics_support_tool/commit/52956ecaa45cd556ba7832d6cb9ec2cf883d983a))
|
||||
* **docker:** docs about the custom network for the db is seperated ([6a631be](https://git.tuffraid.net/cowch/logistics_support_tool/commit/6a631be909b56a899af393510edffd70d7901a7a))
|
||||
* **wss:** more ws stuff ([63c053b](https://git.tuffraid.net/cowch/logistics_support_tool/commit/63c053b38ce3ab3c3a94cda620da930f4e8615bd))
|
||||
|
||||
### 🛠️ Code Refactor
|
||||
|
||||
* **app port:** changed to have the port be dyncamic on the iis side ([074032f](https://git.tuffraid.net/cowch/logistics_support_tool/commit/074032f20dc90810416c5899e44fefe86b52f98a))
|
||||
* **build:** added back in the build name stuff ([92ce51e](https://git.tuffraid.net/cowch/logistics_support_tool/commit/92ce51eb7cf14ebb599c29fea4721e21badafbf6))
|
||||
* **config:** changed to settings to match the other lst in node. makes it more easy to manage ([3bc3801](https://git.tuffraid.net/cowch/logistics_support_tool/commit/3bc3801ffbb544a814d52c72e566e8d4866a7f38))
|
||||
* **createzip:** added in env-example to the zip file ([6c8ac33](https://git.tuffraid.net/cowch/logistics_support_tool/commit/6c8ac33be73f203137b883e33feb625ccc0945e9))
|
||||
* **docker compose example:** added in postgress stuff plus network ([623e19f](https://git.tuffraid.net/cowch/logistics_support_tool/commit/623e19f028d27fbfc46bee567ce78169cddba8fb))
|
||||
* **settings:** changed config to settings and added in the update method for this as well ([a0aa75c](https://git.tuffraid.net/cowch/logistics_support_tool/commit/a0aa75c5a0b4a6e3a10b88bbcccf43d096e532b4))
|
||||
* **wrapper:** removed the logger stuff so we dont fill up space ([8a08d3e](https://git.tuffraid.net/cowch/logistics_support_tool/commit/8a08d3eac6540b00ff23115936d56b4f22f16d53))
|
||||
* **ws:** ws logging and channel manager added no auth currently ([a1a30cf](https://git.tuffraid.net/cowch/logistics_support_tool/commit/a1a30cffd18e02e1061959fa3164f8237522880c))
|
||||
|
||||
### 🚀 Performance
|
||||
|
||||
* **websocket:** added in base url to help with ssl stuff and iis ([daf9e8a](https://git.tuffraid.net/cowch/logistics_support_tool/commit/daf9e8a966fd440723b1aec932a02873a5e27eb7))
|
||||
|
||||
### 📝 Testing Code
|
||||
|
||||
* **iis:** wrapper test for ws ([75c17d2](https://git.tuffraid.net/cowch/logistics_support_tool/commit/75c17d20659dcc5a762e00928709c4d3dd277284))
|
||||
|
||||
### 📈 Project changes
|
||||
|
||||
* **hotreload:** added in air for hot reloading ([78be07c](https://git.tuffraid.net/cowch/logistics_support_tool/commit/78be07c8bbf5acbcdac65351f693941f47be4cb5))
|
||||
|
||||
## [0.0.1-alpha.5](https://git.tuffraid.net/cowch/logistics_support_tool/compare/v0.0.1-alpha.4...v0.0.1-alpha.5) (2025-07-21)
|
||||
|
||||
### 🌟 Enhancements
|
||||
|
||||
* **backend:** env added ([67b5976](https://git.tuffraid.net/cowch/logistics_support_tool/commit/67b59761769350951bc6b52ef715b592b5d4a862))
|
||||
* **backend:** set the static path to the docs ([5a9636c](https://git.tuffraid.net/cowch/logistics_support_tool/commit/5a9636cdc15f164ed3547f544e34858683b38241))
|
||||
* **docs:** added in the new docs to build alongside the backend ([3fdcc11](https://git.tuffraid.net/cowch/logistics_support_tool/commit/3fdcc110e3b4d7356af1fb025070bdf7413a8e88))
|
||||
* **env-example:** added in an example env ([a23b6a6](https://git.tuffraid.net/cowch/logistics_support_tool/commit/a23b6a6e9eef4dbeb2f84c325ca8dca178ab3ff3))
|
||||
* **env:** new env introduced to handle dev and prodution ([f41a1b3](https://git.tuffraid.net/cowch/logistics_support_tool/commit/f41a1b336389d6100e42681f53d9f618c8726f25))
|
||||
* **services:** just a 1 to 1 from lstv2 ([8e03903](https://git.tuffraid.net/cowch/logistics_support_tool/commit/8e039037a9b40994b9e77f59680a3ce1b6ebc3a0))
|
||||
* **update server:** new update server added with iis stop and old version included ([05e31f4](https://git.tuffraid.net/cowch/logistics_support_tool/commit/05e31f4b9e20799257da244b237420fa2b6435f8))
|
||||
|
||||
### 🐛 Bug fixes
|
||||
|
||||
* **backend:** ignored docs this should be built before running by the user ([fdf14b0](https://git.tuffraid.net/cowch/logistics_support_tool/commit/fdf14b06c88d3057f31184e03fb592bd9a959847))
|
||||
* **builds:** added in the gets/installs for our go project and node portons ([528d7af](https://git.tuffraid.net/cowch/logistics_support_tool/commit/528d7af0312cb3de43ddc93d8af22bde6aadea52))
|
||||
* **docs:** added a copy script to cp the build to the backend with ps1 ([0fb2ec5](https://git.tuffraid.net/cowch/logistics_support_tool/commit/0fb2ec52739def8294a57bbc6c497ba6531568a6))
|
||||
|
||||
### 📚 Documentation
|
||||
|
||||
* **env:** changes to have more clear info on the example doc ([5f9d495](https://git.tuffraid.net/cowch/logistics_support_tool/commit/5f9d49561a7c22a4d2cb85bd06bbbdb9fa952224))
|
||||
* **iiscontrol:** added in an example how to run i t ([ccddef8](https://git.tuffraid.net/cowch/logistics_support_tool/commit/ccddef8ba8f64774db6d99fba3ea7c7c54bea1a5))
|
||||
|
||||
### 🛠️ Code Refactor
|
||||
|
||||
* **backend:** changes to convert the backend to strictly the app ([1e649d1](https://git.tuffraid.net/cowch/logistics_support_tool/commit/1e649d1f23c404252754d746254810386bb0f233))
|
||||
* **build:** changes to remove the build name as it was not really realvent ([392a9ef](https://git.tuffraid.net/cowch/logistics_support_tool/commit/392a9ef407d4e64f573cbfc9109c8a81f55c14d5))
|
||||
* **config:** changes to autoformat go files ([4ef2d90](https://git.tuffraid.net/cowch/logistics_support_tool/commit/4ef2d90aa8595d5e3d18a289c012320bdf0dcc4a))
|
||||
* **createzip:** added in verbage to be clear what was done at the end ([452bdbe](https://git.tuffraid.net/cowch/logistics_support_tool/commit/452bdbedb48cde7fa0ef246fb61e304127c49e58))
|
||||
* **createzip:** changes to the way the app looks for better understanding ([d2578b8](https://git.tuffraid.net/cowch/logistics_support_tool/commit/d2578b885029ca98b750f4c6996e567053b2e517))
|
||||
* **createzip:** renamed the backend to app now that everything is in one ([0c4465c](https://git.tuffraid.net/cowch/logistics_support_tool/commit/0c4465c91a40cbea73048617952df26b476d01f4))
|
||||
* **docker:** removed frontend from being built ([9acfd1c](https://git.tuffraid.net/cowch/logistics_support_tool/commit/9acfd1ccd0b9019d5acc01153ddad15b82b6c74e))
|
||||
* **env-example:** changes to the env example to include the dev stuff ([2d23372](https://git.tuffraid.net/cowch/logistics_support_tool/commit/2d2337257f2b31c740f9eb7064be010528e14c7c))
|
||||
* **frontend:** changes to no longer use server side and only static files ([39d23f4](https://git.tuffraid.net/cowch/logistics_support_tool/commit/39d23f4a8a8af6b1acb87b913e5cd1929fe144e4))
|
||||
* **lstv2build:** changes to not always build the old app if we dont need too ([ed40f4c](https://git.tuffraid.net/cowch/logistics_support_tool/commit/ed40f4c77e9d81e36292f38c342e9c2b062f84b9))
|
||||
* **lstv2:** moved the loc to .env file ([7539b16](https://git.tuffraid.net/cowch/logistics_support_tool/commit/7539b1653d7a48dbe248847ed321ab065e58efa0))
|
||||
* **wrapper:** changes to handle docs and frontned now ([253d998](https://git.tuffraid.net/cowch/logistics_support_tool/commit/253d998b68b5808d6fd2d9731255616238fcdb71))
|
||||
|
||||
### 🚀 Performance
|
||||
|
||||
* **docs:** changes to stop the server from opening a browser when it started up ([f0bcea0](https://git.tuffraid.net/cowch/logistics_support_tool/commit/f0bcea0405db364e1e15471b5db74fc9d8f93788))
|
||||
|
||||
### 📝 Testing Code
|
||||
|
||||
* **app:** added production into the build so we dont fill logs up ([7e9401d](https://git.tuffraid.net/cowch/logistics_support_tool/commit/7e9401d8bb12589de6ce1517e4502d784788ab0f))
|
||||
* **docker:** changes to make all latest now instead of 2 apps ([5a95bf3](https://git.tuffraid.net/cowch/logistics_support_tool/commit/5a95bf3ef0f4d4b125f8e777d57cc96b3d3a894d))
|
||||
|
||||
### 📈 Project changes
|
||||
|
||||
* **wrapper:** changes to clean the publish folder ([5cc1fbe](https://git.tuffraid.net/cowch/logistics_support_tool/commit/5cc1fbe919a8c3fa46617f7b9ed2830559b8978f))
|
||||
|
||||
### 📈 Project Builds
|
||||
|
||||
* **docs:** added building the docs into the build script ([6072afc](https://git.tuffraid.net/cowch/logistics_support_tool/commit/6072afc8c01a4fe4980e0e17cfe41d5a9cd524ef))
|
||||
|
||||
## [0.0.1-alpha.4](https://git.tuffraid.net/cowch/logistics_support_tool/compare/v0.0.1-alpha.3...v0.0.1-alpha.4) (2025-07-16)
|
||||
|
||||
### 📈 Project changes
|
||||
|
||||
@@ -1,47 +1,96 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Net;
|
||||
using System.Net.WebSockets;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.AspNetCore.Builder;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using System.Net.Http;
|
||||
|
||||
var builder = WebApplication.CreateBuilder(args);
|
||||
// to build the binary dotnet publish -c Release -o ./publish
|
||||
// Go backend
|
||||
builder.Services.AddHttpClient("GoBackend", client =>
|
||||
{
|
||||
client.BaseAddress = new Uri("http://localhost:8080");
|
||||
client.Timeout = TimeSpan.FromSeconds(30);
|
||||
});
|
||||
|
||||
// Node frontend
|
||||
builder.Services.AddHttpClient("NodeFrontend", client =>
|
||||
{
|
||||
client.BaseAddress = new Uri("http://localhost:3000");
|
||||
client.Timeout = TimeSpan.FromSeconds(30);
|
||||
});
|
||||
|
||||
var app = builder.Build();
|
||||
|
||||
app.UseStaticFiles();
|
||||
// Enable WebSocket support
|
||||
app.UseWebSockets();
|
||||
|
||||
app.Use((Func<HttpContext, Func<Task>, Task>)(async (context, next) =>
|
||||
// Logging method
|
||||
void LogToFile(string message)
|
||||
{
|
||||
var clientFactory = context.RequestServices.GetRequiredService<IHttpClientFactory>();
|
||||
try
|
||||
{
|
||||
string logDir = Path.Combine(AppContext.BaseDirectory, "logs");
|
||||
Directory.CreateDirectory(logDir);
|
||||
string logFilePath = Path.Combine(logDir, "proxy_log.txt");
|
||||
File.AppendAllText(logFilePath, $"{DateTime.UtcNow}: {message}{Environment.NewLine}");
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// Handle potential errors writing to log file
|
||||
Console.WriteLine($"Logging error: {ex.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
var isApiRequest =
|
||||
context.Request.Path.StartsWithSegments("/api") ||
|
||||
context.Request.Path.StartsWithSegments("/graphql") ||
|
||||
context.Request.Path.StartsWithSegments("/auth") ||
|
||||
!context.Request.Method.Equals("GET", StringComparison.OrdinalIgnoreCase);
|
||||
// Middleware to handle WebSocket requests
|
||||
app.Use(async (context, next) =>
|
||||
{
|
||||
if (context.WebSockets.IsWebSocketRequest && context.Request.Path.StartsWithSegments("/ws"))
|
||||
{
|
||||
// LogToFile($"WebSocket request received for path: {context.Request.Path}");
|
||||
|
||||
var client = clientFactory.CreateClient(isApiRequest ? "GoBackend" : "NodeFrontend");
|
||||
try
|
||||
{
|
||||
var backendUri = new UriBuilder("ws", "localhost", 8080)
|
||||
{
|
||||
Path = context.Request.Path,
|
||||
Query = context.Request.QueryString.ToString()
|
||||
}.Uri;
|
||||
|
||||
using var backendSocket = new ClientWebSocket();
|
||||
await backendSocket.ConnectAsync(backendUri, context.RequestAborted);
|
||||
|
||||
using var frontendSocket = await context.WebSockets.AcceptWebSocketAsync();
|
||||
var cts = new CancellationTokenSource();
|
||||
|
||||
// WebSocket forwarding tasks
|
||||
var forwardToBackend = ForwardWebSocketAsync(frontendSocket, backendSocket, cts.Token);
|
||||
var forwardToFrontend = ForwardWebSocketAsync(backendSocket, frontendSocket, cts.Token);
|
||||
|
||||
await Task.WhenAny(forwardToBackend, forwardToFrontend);
|
||||
cts.Cancel();
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
//LogToFile($"WebSocket proxy error: {ex.Message}");
|
||||
context.Response.StatusCode = (int)HttpStatusCode.BadGateway;
|
||||
await context.Response.WriteAsync($"WebSocket proxy error: {ex.Message}");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
await next();
|
||||
}
|
||||
});
|
||||
|
||||
// Middleware to handle HTTP requests
|
||||
app.Use(async (context, next) =>
|
||||
{
|
||||
if (context.WebSockets.IsWebSocketRequest)
|
||||
{
|
||||
await next();
|
||||
return;
|
||||
}
|
||||
|
||||
var client = context.RequestServices.GetRequiredService<IHttpClientFactory>().CreateClient("GoBackend");
|
||||
|
||||
try
|
||||
{
|
||||
var requestUri = context.Request.Path + context.Request.QueryString;
|
||||
|
||||
var request = new HttpRequestMessage(
|
||||
new HttpMethod(context.Request.Method),
|
||||
requestUri);
|
||||
var request = new HttpRequestMessage(new HttpMethod(context.Request.Method),
|
||||
context.Request.Path + context.Request.QueryString);
|
||||
|
||||
foreach (var header in context.Request.Headers)
|
||||
{
|
||||
@@ -52,13 +101,12 @@ app.Use((Func<HttpContext, Func<Task>, Task>)(async (context, next) =>
|
||||
}
|
||||
}
|
||||
|
||||
if (context.Request.ContentLength > 0 || context.Request.Headers.ContainsKey("Transfer-Encoding"))
|
||||
if (context.Request.ContentLength > 0 && request.Content == null)
|
||||
{
|
||||
request.Content = new StreamContent(context.Request.Body);
|
||||
}
|
||||
|
||||
var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, context.RequestAborted);
|
||||
|
||||
context.Response.StatusCode = (int)response.StatusCode;
|
||||
|
||||
foreach (var header in response.Headers)
|
||||
@@ -72,14 +120,39 @@ app.Use((Func<HttpContext, Func<Task>, Task>)(async (context, next) =>
|
||||
}
|
||||
|
||||
context.Response.Headers.Remove("transfer-encoding");
|
||||
|
||||
await response.Content.CopyToAsync(context.Response.Body);
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
context.Response.StatusCode = isApiRequest ? 503 : 502;
|
||||
await context.Response.WriteAsync($"{(isApiRequest ? "Go API" : "Frontend")} unavailable: {ex.Message}");
|
||||
LogToFile($"HTTP proxy error: {ex.Message}");
|
||||
context.Response.StatusCode = (int)HttpStatusCode.BadGateway;
|
||||
await context.Response.WriteAsync($"Backend request failed: {ex.Message}");
|
||||
}
|
||||
}));
|
||||
});
|
||||
|
||||
async Task ForwardWebSocketAsync(WebSocket source, WebSocket destination, CancellationToken cancellationToken)
|
||||
{
|
||||
var buffer = new byte[4 * 1024];
|
||||
try
|
||||
{
|
||||
while (source.State == WebSocketState.Open &&
|
||||
destination.State == WebSocketState.Open &&
|
||||
!cancellationToken.IsCancellationRequested)
|
||||
{
|
||||
var result = await source.ReceiveAsync(new ArraySegment<byte>(buffer), cancellationToken);
|
||||
if (result.MessageType == WebSocketMessageType.Close)
|
||||
{
|
||||
await destination.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, "Closing", cancellationToken);
|
||||
break;
|
||||
}
|
||||
await destination.SendAsync(new ArraySegment<byte>(buffer, 0, result.Count), result.MessageType, result.EndOfMessage, cancellationToken);
|
||||
}
|
||||
}
|
||||
catch (WebSocketException ex)
|
||||
{
|
||||
LogToFile($"WebSocket forwarding error: {ex.Message}");
|
||||
await destination.CloseOutputAsync(WebSocketCloseStatus.InternalServerError, "Error", cancellationToken);
|
||||
}
|
||||
}
|
||||
|
||||
app.Run();
|
||||
@@ -1,15 +1,36 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<configuration>
|
||||
<location path="." inheritInChildApplications="false">
|
||||
<system.webServer>
|
||||
<handlers>
|
||||
<add name="aspNetCore" path="*" verb="*" modules="AspNetCoreModuleV2" resourceType="Unspecified" />
|
||||
</handlers>
|
||||
<aspNetCore processPath="dotnet"
|
||||
arguments=".\LstWrapper.dll"
|
||||
stdoutLogEnabled="true"
|
||||
stdoutLogFile=".\logs\stdout"
|
||||
hostingModel="inprocess" />
|
||||
</system.webServer>
|
||||
</location>
|
||||
</configuration>
|
||||
<system.webServer>
|
||||
<!-- Enable WebSockets -->
|
||||
<webSocket enabled="true" receiveBufferLimit="4194304" pingInterval="00:01:00" />
|
||||
|
||||
<rewrite>
|
||||
<rules>
|
||||
<!-- Proxy all requests starting with /lst/ to the .NET wrapper (port 4000) -->
|
||||
<rule name="Proxy to Wrapper" stopProcessing="true">
|
||||
<match url="^lst/(.*)" />
|
||||
<conditions>
|
||||
<!-- Skip this rule if it's a WebSocket request -->
|
||||
<add input="{HTTP_UPGRADE}" pattern="^WebSocket$" negate="true" />
|
||||
</conditions>
|
||||
<action type="Rewrite" url="http://localhost:8080/{R:1}" />
|
||||
</rule>
|
||||
</rules>
|
||||
</rewrite>
|
||||
|
||||
<staticContent>
|
||||
<mimeMap fileExtension=".js" mimeType="application/javascript" />
|
||||
<mimeMap fileExtension=".mjs" mimeType="application/javascript" />
|
||||
<mimeMap fileExtension=".css" mimeType="text/css" />
|
||||
<mimeMap fileExtension=".svg" mimeType="image/svg+xml" />
|
||||
</staticContent>
|
||||
|
||||
<handlers>
|
||||
<!-- Let AspNetCoreModule handle all requests -->
|
||||
<remove name="WebSocketHandler" />
|
||||
<add name="aspNetCore" path="*" verb="*" modules="AspNetCoreModuleV2" resourceType="Unspecified" />
|
||||
</handlers>
|
||||
|
||||
<aspNetCore processPath="dotnet" arguments=".\LstWrapper.dll" stdoutLogEnabled="false" stdoutLogFile=".\logs\stdout" hostingModel="inprocess" />
|
||||
</system.webServer>
|
||||
</configuration>
|
||||
|
||||
@@ -10,3 +10,5 @@ this will also include a primary server to house all the common configs across a
|
||||
|
||||
The new lst will run in docker by building your own image and deploying or pulling the image down.
|
||||
you will also be able to run it in windows or linux.
|
||||
|
||||
when developing in lst and you want to run hotloads installed and configure https://github.com/air-verse/air
|
||||
|
||||
0
backend/.air.toml
Normal file
0
backend/.air.toml
Normal file
2
backend/.dockerignore
Normal file
2
backend/.dockerignore
Normal file
@@ -0,0 +1,2 @@
|
||||
lst_backend.exe
|
||||
lst.net.exe
|
||||
@@ -3,19 +3,25 @@ FROM golang:1.24.4-alpine3.22 AS builder
|
||||
WORKDIR /app
|
||||
|
||||
COPY go.mod go.sum ./
|
||||
# COPY VERSION ./VERSION
|
||||
COPY docs /app/docs/
|
||||
COPY frontend /app/frontend/
|
||||
|
||||
RUN go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN CGO_ENABLED=0 GOOS=linux go build -o lst_go ./main.go
|
||||
|
||||
FROM alpine:latest
|
||||
|
||||
WORKDIR /root/
|
||||
|
||||
# Copy only the binary (no need for source files)
|
||||
RUN mkdir -p ./docs ./frontend
|
||||
|
||||
COPY --from=builder /app/lst_go .
|
||||
# COPY --from=builder /app/VERSION ./
|
||||
COPY --from=builder /app/docs ./docs/
|
||||
COPY --from=builder /app/frontend ./frontend/
|
||||
|
||||
# create the volume paths
|
||||
RUN mkdir -p /data
|
||||
|
||||
@@ -2,33 +2,52 @@ module lst.net
|
||||
|
||||
go 1.24.3
|
||||
|
||||
require github.com/gin-gonic/gin v1.10.1
|
||||
require (
|
||||
github.com/bensch777/discord-webhook-golang v0.0.6
|
||||
github.com/gin-contrib/cors v1.7.6
|
||||
github.com/gin-gonic/gin v1.10.1
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/gorilla/websocket v1.5.3
|
||||
github.com/joho/godotenv v1.5.1
|
||||
github.com/lib/pq v1.10.9
|
||||
github.com/rs/zerolog v1.34.0
|
||||
gorm.io/driver/postgres v1.6.0
|
||||
gorm.io/gorm v1.30.1
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/bytedance/sonic v1.11.6 // indirect
|
||||
github.com/bytedance/sonic/loader v0.1.1 // indirect
|
||||
github.com/cloudwego/base64x v0.1.4 // indirect
|
||||
github.com/bytedance/sonic v1.13.3 // indirect
|
||||
github.com/bytedance/sonic/loader v0.2.4 // indirect
|
||||
github.com/cloudwego/base64x v0.1.5 // indirect
|
||||
github.com/cloudwego/iasm v0.2.0 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
|
||||
github.com/gin-contrib/sse v0.1.0 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
|
||||
github.com/gin-contrib/sse v1.1.0 // indirect
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/go-playground/validator/v10 v10.20.0 // indirect
|
||||
github.com/goccy/go-json v0.10.2 // indirect
|
||||
github.com/go-playground/validator/v10 v10.26.0 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||
github.com/jackc/pgx/v5 v5.7.5 // indirect
|
||||
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
||||
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||
github.com/jinzhu/now v1.1.5 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||
github.com/ugorji/go/codec v1.2.12 // indirect
|
||||
golang.org/x/arch v0.8.0 // indirect
|
||||
golang.org/x/crypto v0.23.0 // indirect
|
||||
golang.org/x/net v0.25.0 // indirect
|
||||
golang.org/x/sys v0.20.0 // indirect
|
||||
golang.org/x/text v0.15.0 // indirect
|
||||
google.golang.org/protobuf v1.34.1 // indirect
|
||||
github.com/ugorji/go/codec v1.3.0 // indirect
|
||||
golang.org/x/arch v0.18.0 // indirect
|
||||
golang.org/x/crypto v0.40.0 // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/sync v0.16.0 // indirect
|
||||
golang.org/x/sys v0.34.0 // indirect
|
||||
golang.org/x/text v0.27.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
||||
51
backend/internal/db/db.go
Normal file
51
backend/internal/db/db.go
Normal file
@@ -0,0 +1,51 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"gorm.io/driver/postgres"
|
||||
"gorm.io/gorm"
|
||||
"lst.net/internal/models"
|
||||
)
|
||||
|
||||
var DB *gorm.DB
|
||||
|
||||
type DBConfig struct {
|
||||
DB *gorm.DB
|
||||
DSN string
|
||||
}
|
||||
|
||||
func InitDB() (*DBConfig, error) {
|
||||
dsn := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s",
|
||||
os.Getenv("DB_HOST"),
|
||||
os.Getenv("DB_PORT"),
|
||||
os.Getenv("DB_USER"),
|
||||
os.Getenv("DB_PASSWORD"),
|
||||
os.Getenv("DB_NAME"))
|
||||
|
||||
var err error
|
||||
|
||||
DB, err = gorm.Open(postgres.Open(dsn), &gorm.Config{})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to connect to database: %v", err)
|
||||
}
|
||||
|
||||
fmt.Println("✅ Connected to database")
|
||||
|
||||
// ensures we have the uuid stuff setup properly
|
||||
DB.Exec(`CREATE EXTENSION IF NOT EXISTS "uuid-ossp"`)
|
||||
|
||||
err = DB.AutoMigrate(&models.Log{}, &models.Settings{}) // &ClientRecord{}, &Servers{}
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to auto-migrate models: %v", err)
|
||||
}
|
||||
|
||||
fmt.Println("✅ Database migration completed successfully")
|
||||
|
||||
return &DBConfig{
|
||||
DB: DB,
|
||||
DSN: dsn,
|
||||
}, nil
|
||||
}
|
||||
21
backend/internal/models/logs.go
Normal file
21
backend/internal/models/logs.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"gorm.io/gorm"
|
||||
"lst.net/pkg"
|
||||
)
|
||||
|
||||
type Log struct {
|
||||
LogID uuid.UUID `gorm:"type:uuid;default:uuid_generate_v4();primaryKey" json:"id"`
|
||||
Level string `gorm:"size:10;not null"` // "info", "error", etc.
|
||||
Message string `gorm:"not null"`
|
||||
Service string `gorm:"size:50"`
|
||||
Metadata pkg.JSONB `gorm:"type:jsonb"` // fields (e.g., {"user_id": 123})
|
||||
CreatedAt time.Time `gorm:"index"`
|
||||
Checked bool `gorm:"type:boolean;default:false"`
|
||||
UpdatedAt time.Time
|
||||
DeletedAt gorm.DeletedAt `gorm:"index"`
|
||||
}
|
||||
32
backend/internal/models/servers.go
Normal file
32
backend/internal/models/servers.go
Normal file
@@ -0,0 +1,32 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"lst.net/pkg"
|
||||
)
|
||||
|
||||
type Servers struct {
|
||||
ServerID uuid.UUID `gorm:"type:uuid;default:uuid_generate_v4();primaryKey" json:"id"`
|
||||
ServerName string `gorm:"size:50;not null"`
|
||||
ServerDNS string `gorm:"size:25;not null"`
|
||||
PlantToken string `gorm:"size:10;not null"`
|
||||
IPAddress string `gorm:"size:16;not null"`
|
||||
GreatPlainsPlantCode int `gorm:"size:10;not null"`
|
||||
StreetAddress string `gorm:"size:255;not null"`
|
||||
CityState string `gorm:"size:50;not null"`
|
||||
Zipcode int `gorm:"size:13;not null"`
|
||||
ContactEmail string `gorm:"size:255"`
|
||||
ContactPhone string `gorm:"size:255"`
|
||||
CustomerTiAcc string `gorm:"size:255"`
|
||||
LstServerPort int `gorm:"size:255; not null"`
|
||||
Active bool `gorm:"type:boolean;default:true"`
|
||||
LerverLoc string `gorm:"size:255:not null"`
|
||||
LastUpdated time.Time `gorm:"index"`
|
||||
ShippingHours pkg.JSONB `gorm:"type:jsonb;default:'[{\"early\": \"06:30\", \"late\": \"23:00\"}]'"`
|
||||
TiPostTime pkg.JSONB `gorm:"type:jsonb;default:'[{\"from\": \"24\", \"to\": \"24\"}]'"`
|
||||
OtherSettings pkg.JSONB `gorm:"type:jsonb;default:'[{\"specialInstructions\": \"something for ti\", \"active\": false}]'"`
|
||||
IsUpgrading bool `gorm:"type:boolean;default:true"`
|
||||
AlplaProdApiKey string `gorm:"size:255"`
|
||||
}
|
||||
20
backend/internal/models/settings.go
Normal file
20
backend/internal/models/settings.go
Normal file
@@ -0,0 +1,20 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
type Settings struct {
|
||||
SettingID uuid.UUID `gorm:"type:uuid;default:uuid_generate_v4();primaryKey" json:"id"`
|
||||
Name string `gorm:"uniqueIndex;not null"`
|
||||
Description string `gorm:"type:text"`
|
||||
Value string `gorm:"not null"`
|
||||
Enabled bool `gorm:"default:true"`
|
||||
AppService string `gorm:"default:system"`
|
||||
CreatedAt time.Time `gorm:"index"`
|
||||
UpdatedAt time.Time `gorm:"index"`
|
||||
DeletedAt gorm.DeletedAt `gorm:"index"`
|
||||
}
|
||||
21
backend/internal/models/ws_client.go
Normal file
21
backend/internal/models/ws_client.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"lst.net/pkg"
|
||||
)
|
||||
|
||||
// ClientRecord is the persisted row for a single WebSocket client connection.
type ClientRecord struct {
	ClientID  uuid.UUID `gorm:"type:uuid;default:uuid_generate_v4();primaryKey" json:"id"`
	APIKey    string    `gorm:"not null"`
	IPAddress string    `gorm:"not null"`
	UserAgent string    `gorm:"size:255"`
	ConnectedAt time.Time `gorm:"index"`
	// LastHeartbeat is refreshed by the heartbeat/pong handlers.
	LastHeartbeat time.Time `gorm:"column:last_heartbeat"`
	// Channels stores the subscribed channel names as a JSONB object.
	Channels  pkg.JSONB `gorm:"type:jsonb"`
	CreatedAt time.Time
	UpdatedAt time.Time
	// DisconnectedAt stays nil while the client is connected.
	DisconnectedAt *time.Time `gorm:"column:disconnected_at"`
}
|
||||
179
backend/internal/notifications/ws/ws_channel_manager.go
Normal file
179
backend/internal/notifications/ws/ws_channel_manager.go
Normal file
@@ -0,0 +1,179 @@
|
||||
package ws
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
// Channel is a named pub/sub hub. Clients join/leave through Register and
// Unregister; messages written to Broadcast are fanned out to all subscribed
// clients by RunChannel.
type Channel struct {
	Name string
	// Clients is the current subscriber set; guarded by lock.
	Clients    map[*Client]bool
	Register   chan *Client
	Unregister chan *Client
	Broadcast  chan []byte
	lock       sync.RWMutex
}
|
||||
|
||||
var (
|
||||
channels = make(map[string]*Channel)
|
||||
channelsMu sync.RWMutex
|
||||
)
|
||||
|
||||
// InitializeChannels creates and returns all channels
|
||||
func InitializeChannels() {
|
||||
channelsMu.Lock()
|
||||
defer channelsMu.Unlock()
|
||||
|
||||
channels["logServices"] = NewChannel("logServices")
|
||||
channels["labels"] = NewChannel("labels")
|
||||
// Add more channels here as needed
|
||||
}
|
||||
|
||||
func NewChannel(name string) *Channel {
|
||||
return &Channel{
|
||||
Name: name,
|
||||
Clients: make(map[*Client]bool),
|
||||
Register: make(chan *Client),
|
||||
Unregister: make(chan *Client),
|
||||
Broadcast: make(chan []byte),
|
||||
}
|
||||
}
|
||||
|
||||
func GetChannel(name string) (*Channel, bool) {
|
||||
channelsMu.RLock()
|
||||
defer channelsMu.RUnlock()
|
||||
ch, exists := channels[name]
|
||||
return ch, exists
|
||||
}
|
||||
|
||||
func GetAllChannels() map[string]*Channel {
|
||||
channelsMu.RLock()
|
||||
defer channelsMu.RUnlock()
|
||||
|
||||
chs := make(map[string]*Channel)
|
||||
for k, v := range channels {
|
||||
chs[k] = v
|
||||
}
|
||||
return chs
|
||||
}
|
||||
|
||||
func StartAllChannels() {
|
||||
|
||||
channelsMu.RLock()
|
||||
defer channelsMu.RUnlock()
|
||||
|
||||
for _, ch := range channels {
|
||||
go ch.RunChannel()
|
||||
}
|
||||
}
|
||||
|
||||
// CleanupChannels closes every channel's Broadcast chan and resets the
// registry to empty.
// NOTE(review): closing Broadcast while producers (StartBroadcasting) may
// still send — or while RunChannel is still receiving — is racy; confirm all
// senders are stopped before this is called.
func CleanupChannels() {
	channelsMu.Lock()
	defer channelsMu.Unlock()

	for _, ch := range channels {
		close(ch.Broadcast)
		// Add any other cleanup needed
	}
	channels = make(map[string]*Channel)
}
|
||||
|
||||
func StartBroadcasting(broadcaster chan logger.Message, channels map[string]*Channel) {
|
||||
logger := logger.New()
|
||||
go func() {
|
||||
for msg := range broadcaster {
|
||||
switch msg.Channel {
|
||||
case "logServices":
|
||||
// Just forward the message - filtering happens in RunChannel()
|
||||
messageBytes, err := json.Marshal(msg)
|
||||
if err != nil {
|
||||
logger.Error("Error marshaling message", "websocket", map[string]interface{}{
|
||||
"errors": err,
|
||||
})
|
||||
continue
|
||||
}
|
||||
channels["logServices"].Broadcast <- messageBytes
|
||||
|
||||
case "labels":
|
||||
// Future labels handling
|
||||
messageBytes, err := json.Marshal(msg)
|
||||
if err != nil {
|
||||
logger.Error("Error marshaling message", "websocket", map[string]interface{}{
|
||||
"errors": err,
|
||||
})
|
||||
continue
|
||||
}
|
||||
channels["labels"].Broadcast <- messageBytes
|
||||
|
||||
default:
|
||||
log.Printf("Received message for unknown channel: %s", msg.Channel)
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
// contains reports whether item occurs in slice, case-insensitively.
// An empty (or nil) slice acts as a wildcard and matches everything.
func contains(slice []string, item string) bool {
	if len(slice) == 0 {
		return true
	}

	needle := strings.ToLower(item)
	for _, candidate := range slice {
		if strings.ToLower(candidate) == needle {
			return true
		}
	}
	return false
}
|
||||
|
||||
// Updated Channel.RunChannel() for logServices filtering
|
||||
func (ch *Channel) RunChannel() {
|
||||
for {
|
||||
select {
|
||||
case client := <-ch.Register:
|
||||
ch.lock.Lock()
|
||||
ch.Clients[client] = true
|
||||
ch.lock.Unlock()
|
||||
|
||||
case client := <-ch.Unregister:
|
||||
ch.lock.Lock()
|
||||
delete(ch.Clients, client)
|
||||
ch.lock.Unlock()
|
||||
|
||||
case message := <-ch.Broadcast:
|
||||
var msg logger.Message
|
||||
if err := json.Unmarshal(message, &msg); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
ch.lock.RLock()
|
||||
for client := range ch.Clients {
|
||||
// Special filtering for logServices
|
||||
if ch.Name == "logServices" {
|
||||
logLevel, _ := msg.Meta["level"].(string)
|
||||
logService, _ := msg.Meta["service"].(string)
|
||||
|
||||
levelMatch := len(client.LogLevels) == 0 || contains(client.LogLevels, logLevel)
|
||||
serviceMatch := len(client.Services) == 0 || contains(client.Services, logService)
|
||||
|
||||
if !levelMatch || !serviceMatch {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
select {
|
||||
case client.Send <- message:
|
||||
default:
|
||||
ch.Unregister <- client
|
||||
}
|
||||
}
|
||||
ch.lock.RUnlock()
|
||||
}
|
||||
}
|
||||
}
|
||||
292
backend/internal/notifications/ws/ws_client.go
Normal file
292
backend/internal/notifications/ws/ws_client.go
Normal file
@@ -0,0 +1,292 @@
|
||||
package ws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/gorilla/websocket"
|
||||
"gorm.io/gorm"
|
||||
"lst.net/internal/models"
|
||||
"lst.net/pkg"
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
var (
|
||||
clients = make(map[*Client]bool)
|
||||
clientsMu sync.RWMutex
|
||||
)
|
||||
|
||||
// Client is a live WebSocket connection plus its subscription state.
// Conn, Send, done, isAlive and lastActive are runtime-only and excluded
// from JSON serialization.
type Client struct {
	ClientID  uuid.UUID       `json:"client_id"`
	Conn      *websocket.Conn `json:"-"` // Excluded from JSON
	APIKey    string          `json:"api_key"`
	IPAddress string          `json:"ip_address"`
	UserAgent string          `json:"user_agent"`
	// Send is the outbound message queue drained by SocketHandler's write loop.
	Send chan []byte `json:"-"` // Excluded from JSON
	// Channels maps channel name -> subscribed.
	Channels map[string]bool `json:"channels"`
	// LogLevels/Services filter logServices messages; empty means "match all"
	// (see contains in the channel manager).
	LogLevels   []string  `json:"levels,omitempty"`
	Services    []string  `json:"services,omitempty"`
	Labels      []string  `json:"labels,omitempty"`
	ConnectedAt time.Time `json:"connected_at"`
	done        chan struct{} // For graceful shutdown
	isAlive     atomic.Bool
	lastActive  time.Time // Tracks last activity
}
|
||||
|
||||
func (c *Client) SaveToDB(log *logger.CustomLogger, db *gorm.DB) {
|
||||
|
||||
// Convert c.Channels (map[string]bool) to map[string]interface{} for JSONB
|
||||
channels := make(map[string]interface{})
|
||||
for ch := range c.Channels {
|
||||
channels[ch] = true
|
||||
}
|
||||
|
||||
clientRecord := &models.ClientRecord{
|
||||
APIKey: c.APIKey,
|
||||
IPAddress: c.IPAddress,
|
||||
UserAgent: c.UserAgent,
|
||||
Channels: pkg.JSONB(channels),
|
||||
ConnectedAt: time.Now(),
|
||||
LastHeartbeat: time.Now(),
|
||||
}
|
||||
|
||||
if err := db.Create(&clientRecord).Error; err != nil {
|
||||
log.Error("❌ Error saving client", "websocket", map[string]interface{}{
|
||||
"error": err,
|
||||
})
|
||||
|
||||
} else {
|
||||
c.ClientID = clientRecord.ClientID
|
||||
c.ConnectedAt = clientRecord.ConnectedAt
|
||||
|
||||
clientData := fmt.Sprintf("A new client %v, just connected", c.ClientID)
|
||||
log.Info(clientData, "websocket", map[string]interface{}{})
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) MarkDisconnected(log *logger.CustomLogger, db *gorm.DB) {
|
||||
|
||||
clientData := fmt.Sprintf("Client %v Dicconected", c.ClientID)
|
||||
log.Info(clientData, "websocket", map[string]interface{}{})
|
||||
|
||||
now := time.Now()
|
||||
res := db.Model(&models.ClientRecord{}).
|
||||
Where("client_id = ?", c.ClientID).
|
||||
Updates(map[string]interface{}{
|
||||
"disconnected_at": &now,
|
||||
})
|
||||
|
||||
if res.RowsAffected == 0 {
|
||||
|
||||
log.Info("⚠️ No rows updated for client_id", "websocket", map[string]interface{}{
|
||||
"clientID": c.ClientID,
|
||||
})
|
||||
}
|
||||
if res.Error != nil {
|
||||
|
||||
log.Error("❌ Error updating disconnected_at", "websocket", map[string]interface{}{
|
||||
"clientID": c.ClientID,
|
||||
"error": res.Error,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) SafeClient() *Client {
|
||||
return &Client{
|
||||
ClientID: c.ClientID,
|
||||
APIKey: c.APIKey,
|
||||
IPAddress: c.IPAddress,
|
||||
UserAgent: c.UserAgent,
|
||||
Channels: c.Channels,
|
||||
LogLevels: c.LogLevels,
|
||||
Services: c.Services,
|
||||
Labels: c.Labels,
|
||||
ConnectedAt: c.ConnectedAt,
|
||||
}
|
||||
}
|
||||
|
||||
// GetAllClients returns safe representations of all clients
|
||||
func GetAllClients() []*Client {
|
||||
clientsMu.RLock()
|
||||
defer clientsMu.RUnlock()
|
||||
|
||||
var clientList []*Client
|
||||
for client := range clients {
|
||||
clientList = append(clientList, client.SafeClient())
|
||||
}
|
||||
return clientList
|
||||
}
|
||||
|
||||
// GetClientsByChannel returns clients in a specific channel
|
||||
func GetClientsByChannel(channel string) []*Client {
|
||||
clientsMu.RLock()
|
||||
defer clientsMu.RUnlock()
|
||||
|
||||
var channelClients []*Client
|
||||
for client := range clients {
|
||||
if client.Channels[channel] {
|
||||
channelClients = append(channelClients, client.SafeClient())
|
||||
}
|
||||
}
|
||||
return channelClients
|
||||
}
|
||||
|
||||
// Heartbeat timing constants.
const (
	// pingPeriod is how often the server pings each client; it must be
	// shorter than pongWait so the peer is pinged before it would expire.
	pingPeriod = 30 * time.Second
	// pongWait is how long to wait for a pong before the peer is considered gone.
	pongWait = 60 * time.Second
	// writeWait bounds each outbound write.
	writeWait = 10 * time.Second
)
|
||||
|
||||
func (c *Client) StartHeartbeat(log *logger.CustomLogger, db *gorm.DB) {
|
||||
|
||||
log.Debug("Started hearbeat", "websocket", map[string]interface{}{})
|
||||
ticker := time.NewTicker(pingPeriod)
|
||||
defer ticker.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ticker.C:
|
||||
if !c.isAlive.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
c.Conn.SetWriteDeadline(time.Now().Add(writeWait))
|
||||
if err := c.Conn.WriteMessage(websocket.PingMessage, nil); err != nil {
|
||||
log.Error("Heartbeat failed", "websocket", map[string]interface{}{
|
||||
"client_id": c.ClientID,
|
||||
"error": err,
|
||||
})
|
||||
c.Close(log, db)
|
||||
return
|
||||
}
|
||||
|
||||
now := time.Now()
|
||||
res := db.Model(&models.ClientRecord{}).
|
||||
Where("client_id = ?", c.ClientID).
|
||||
Updates(map[string]interface{}{
|
||||
"last_heartbeat": &now,
|
||||
})
|
||||
|
||||
if res.RowsAffected == 0 {
|
||||
|
||||
log.Info("⚠️ No rows updated for client_id", "websocket", map[string]interface{}{
|
||||
"clientID": c.ClientID,
|
||||
})
|
||||
}
|
||||
if res.Error != nil {
|
||||
|
||||
log.Error("❌ Error updating disconnected_at", "websocket", map[string]interface{}{
|
||||
"clientID": c.ClientID,
|
||||
"error": res.Error,
|
||||
})
|
||||
}
|
||||
clientStuff := fmt.Sprintf("HeartBeat just done on: %v", c.ClientID)
|
||||
log.Info(clientStuff, "websocket", map[string]interface{}{
|
||||
"clientID": c.ClientID,
|
||||
})
|
||||
|
||||
case <-c.done:
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) Close(log *logger.CustomLogger, db *gorm.DB) {
|
||||
if c.isAlive.CompareAndSwap(true, false) { // Atomic swap
|
||||
close(c.done)
|
||||
c.Conn.Close()
|
||||
// Add any other cleanup here
|
||||
c.MarkDisconnected(log, db)
|
||||
}
|
||||
}
|
||||
|
||||
// startServerPings sends a WebSocket ping every 60 seconds and closes the
// client when a write fails. It stops when c.done is closed.
// NOTE(review): this overlaps with StartHeartbeat, which also pings —
// confirm both loops are meant to run for the same connection.
func (c *Client) startServerPings(log *logger.CustomLogger, db *gorm.DB) {
	ticker := time.NewTicker(60 * time.Second) // Ping every 60s (the old comment said 30s)
	defer ticker.Stop()

	for {
		select {
		case <-ticker.C:
			c.Conn.SetWriteDeadline(time.Now().Add(10 * time.Second))
			if err := c.Conn.WriteMessage(websocket.PingMessage, nil); err != nil {

				log.Error("Server Ping failed", "websocket", map[string]interface{}{
					"clientID": c.ClientID,
					"error":    err,
				})

				c.Close(log, db)
				return
			}
		case <-c.done:
			return
		}
	}
}
|
||||
|
||||
// markActive records the time of the most recent client activity.
// NOTE(review): written without a mutex on the assumption that the
// connection's read/pong handler is the only writer — confirm.
func (c *Client) markActive() {
	c.lastActive = time.Now() // No mutex needed if single-writer
}
|
||||
|
||||
func (c *Client) IsActive() bool {
|
||||
return time.Since(c.lastActive) < 45*time.Second // 1.5x ping interval
|
||||
}
|
||||
|
||||
func (c *Client) updateHeartbeat(log *logger.CustomLogger, db *gorm.DB) {
|
||||
//fmt.Println("Updating heatbeat")
|
||||
now := time.Now()
|
||||
|
||||
//fmt.Printf("Updating heartbeat for client: %s at %v\n", c.ClientID, now)
|
||||
|
||||
//db.DB = db.DB.Debug()
|
||||
res := db.Model(&models.ClientRecord{}).
|
||||
Where("client_id = ?", c.ClientID).
|
||||
Updates(map[string]interface{}{
|
||||
"last_heartbeat": &now, // Explicit format
|
||||
})
|
||||
//fmt.Printf("Executed SQL: %v\n", db.DB.Statement.SQL.String())
|
||||
if res.RowsAffected == 0 {
|
||||
|
||||
log.Info("⚠️ No rows updated for client_id", "websocket", map[string]interface{}{
|
||||
"clientID": c.ClientID,
|
||||
})
|
||||
}
|
||||
if res.Error != nil {
|
||||
|
||||
log.Error("❌ Error updating disconnected_at", "websocket", map[string]interface{}{
|
||||
"clientID": c.ClientID,
|
||||
"error": res.Error,
|
||||
})
|
||||
}
|
||||
// 2. Verify DB connection
|
||||
if db == nil {
|
||||
log.Error("DB connection is nil", "websocket", map[string]interface{}{})
|
||||
return
|
||||
}
|
||||
|
||||
// 3. Test raw SQL execution first
|
||||
testRes := db.Exec("SELECT 1")
|
||||
if testRes.Error != nil {
|
||||
log.Error("DB ping failed", "websocket", map[string]interface{}{
|
||||
"error": testRes.Error,
|
||||
})
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// work on this stats later
|
||||
// Add to your admin endpoint
|
||||
// type ConnectionStats struct {
|
||||
// TotalConnections int `json:"total_connections"`
|
||||
// ActiveConnections int `json:"active_connections"`
|
||||
// AvgDuration string `json:"avg_duration"`
|
||||
// }
|
||||
|
||||
// func GetConnectionStats() ConnectionStats {
|
||||
// // Implement your metrics tracking
|
||||
// }
|
||||
229
backend/internal/notifications/ws/ws_handler.go
Normal file
229
backend/internal/notifications/ws/ws_handler.go
Normal file
@@ -0,0 +1,229 @@
|
||||
package ws
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/gorilla/websocket"
|
||||
"gorm.io/gorm"
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
// JoinPayload is the subscription request a client sends over the socket.
// NOTE(review): SocketHandler currently decodes into an identical anonymous
// struct instead of this type — consider reusing it so the two cannot drift.
type JoinPayload struct {
	Channel string `json:"channel"`
	APIKey  string `json:"apiKey"`
	// Services/Levels filter the logServices channel; empty means "all".
	Services []string `json:"services,omitempty"`
	Levels   []string `json:"levels,omitempty"`
	// Labels filters the labels channel.
	Labels []string `json:"labels,omitempty"`
}
|
||||
|
||||
// upgrader configures the HTTP -> WebSocket handshake.
// NOTE(review): CheckOrigin accepts every origin, which permits cross-site
// WebSocket hijacking — restrict it before production.
var upgrader = websocket.Upgrader{
	CheckOrigin:       func(r *http.Request) bool { return true }, // allow all origins; customize for prod
	HandshakeTimeout:  15 * time.Second,
	ReadBufferSize:    1024,
	WriteBufferSize:   1024,
	EnableCompression: true,
}
|
||||
|
||||
// SocketHandler upgrades the request to a WebSocket, registers the client,
// then runs two loops: a reader goroutine that processes subscription
// requests (channels "logServices" and "labels") and a writer loop (the
// function body's tail) that drains client.Send to the socket.
//
// NOTE(review): MarkDisconnected runs both in the outer defer and in the
// reader goroutine's defer, so most disconnects update the DB row twice —
// confirm whether that is intended.
// NOTE(review): the reader goroutine sends on ch.Unregister while cleanup
// may also be in flight; ordering between the two defers is unverified here.
func SocketHandler(c *gin.Context, channels map[string]*Channel, log *logger.CustomLogger, db *gorm.DB) {
	// Upgrade HTTP to WebSocket
	conn, err := upgrader.Upgrade(c.Writer, c.Request, nil)
	if err != nil {
		log.Error("WebSocket upgrade failed", "websocket", map[string]interface{}{"error": err})
		return
	}
	//defer conn.Close()

	// Create new client. The API key is a placeholder until the first
	// subscription payload supplies a real one.
	client := &Client{
		Conn:      conn,
		APIKey:    "exampleAPIKey",
		Send:      make(chan []byte, 256), // Buffered channel
		Channels:  make(map[string]bool),
		IPAddress: c.ClientIP(),
		UserAgent: c.Request.UserAgent(),
		done:      make(chan struct{}),
	}

	client.isAlive.Store(true)
	// Add to global clients map
	clientsMu.Lock()
	clients[client] = true
	clientsMu.Unlock()

	// Save initial connection to DB
	client.SaveToDB(log, db)
	// Save initial connection to DB
	// if err := client.SaveToDB(); err != nil {
	// 	log.Println("Failed to save client to DB:", err)
	// 	conn.Close()
	// 	return
	// }

	// Set handlers
	conn.SetPingHandler(func(string) error {
		return nil // Auto-responds with pong
	})

	// Each pong refreshes in-memory activity tracking and the DB heartbeat.
	conn.SetPongHandler(func(string) error {
		now := time.Now()
		client.markActive() // Track last pong time
		client.lastActive = now
		client.updateHeartbeat(log, db)
		return nil
	})

	// Start server-side ping ticker
	go client.startServerPings(log, db)

	// Outer cleanup: runs when the writer loop below exits.
	defer func() {
		// Unregister from all channels
		for channelName := range client.Channels {
			if ch, exists := channels[channelName]; exists {
				ch.Unregister <- client
			}
		}

		// Remove from global clients map
		clientsMu.Lock()
		delete(clients, client)
		clientsMu.Unlock()

		// Mark disconnected in DB
		client.MarkDisconnected(log, db)

		// Close connection
		conn.Close()
		log.Info("Client disconnected", "websocket", map[string]interface{}{
			"client": client.ClientID,
		})
	}()

	// Send welcome message immediately
	welcomeMsg := map[string]string{
		"status":  "connected",
		"message": "Welcome to the WebSocket server. Send subscription request to begin.",
	}
	if err := conn.WriteJSON(welcomeMsg); err != nil {
		log.Error("Failed to send welcome message", "websocket", map[string]interface{}{"error": err})
		return
	}

	// Message handling goroutine: reads subscription payloads until the
	// connection errors out, then closes client.Send to stop the writer loop.
	go func() {
		defer func() {
			// Cleanup on disconnect
			for channelName := range client.Channels {
				if ch, exists := channels[channelName]; exists {
					ch.Unregister <- client
				}
			}
			close(client.Send)
			client.MarkDisconnected(log, db)
		}()

		for {
			_, msg, err := conn.ReadMessage()
			if err != nil {
				if websocket.IsUnexpectedCloseError(err, websocket.CloseGoingAway) {
					log.Error("Client disconnected unexpectedl", "websocket", map[string]interface{}{"error": err})
				}
				break
			}

			// Mirrors JoinPayload; kept anonymous as in the original.
			var payload struct {
				Channel  string   `json:"channel"`
				APIKey   string   `json:"apiKey"`
				Services []string `json:"services,omitempty"`
				Levels   []string `json:"levels,omitempty"`
				Labels   []string `json:"labels,omitempty"`
			}

			if err := json.Unmarshal(msg, &payload); err != nil {
				conn.WriteJSON(map[string]string{"error": "invalid payload format"})
				continue
			}

			// Validate API key (implement your own validateAPIKey function)
			// if payload.APIKey == "" || !validateAPIKey(payload.APIKey) {
			// 	conn.WriteJSON(map[string]string{"error": "invalid or missing API key"})
			// 	continue
			// }

			// NOTE(review): presence is checked but the key itself is never
			// validated against anything.
			if payload.APIKey == "" {
				conn.WriteMessage(websocket.TextMessage, []byte("Missing API Key"))
				continue
			}
			client.APIKey = payload.APIKey

			// Handle channel subscription. Channels are mutually exclusive:
			// joining one unregisters the client from the other.
			switch payload.Channel {
			case "logServices":
				// Unregister from other channels if needed
				if client.Channels["labels"] {
					channels["labels"].Unregister <- client
					delete(client.Channels, "labels")
				}

				// Update client filters
				client.Services = payload.Services
				client.LogLevels = payload.Levels

				// Register to channel
				channels["logServices"].Register <- client
				client.Channels["logServices"] = true

				conn.WriteJSON(map[string]string{
					"message": "You are now subscribed to the the service channel",
					"status":  "subscribed",
					"channel": "logServices",
				})

			case "labels":
				// Unregister from other channels if needed
				if client.Channels["logServices"] {
					channels["logServices"].Unregister <- client
					delete(client.Channels, "logServices")
				}

				// Set label filters if provided
				if payload.Labels != nil {
					client.Labels = payload.Labels
				}

				// Register to channel
				channels["labels"].Register <- client
				client.Channels["labels"] = true

				// Update DB record (inserts a new row via SaveToDB)
				client.SaveToDB(log, db)
				// if err := client.SaveToDB(); err != nil {
				// 	log.Println("Failed to update client labels:", err)
				// }

				conn.WriteJSON(map[string]interface{}{
					"message": "You are now subscribed to the label channel",
					"status":  "subscribed",
					"channel": "labels",
					"filters": client.Labels,
				})

			default:
				conn.WriteJSON(map[string]string{
					"error":              "invalid channel",
					"available_channels": "logServices, labels",
				})
			}
		}
	}()

	// Writer loop: send queued messages to the client until Send is closed
	// or a write fails.
	for message := range client.Send {
		if err := conn.WriteMessage(websocket.TextMessage, message); err != nil {
			log.Error("Write erro", "websocket", map[string]interface{}{"error": err})
			break
		}
	}
}
|
||||
79
backend/internal/notifications/ws/ws_log_service.go
Normal file
79
backend/internal/notifications/ws/ws_log_service.go
Normal file
@@ -0,0 +1,79 @@
|
||||
package ws
|
||||
|
||||
// setup the notifiyer
|
||||
|
||||
// -- Only needs to be run once in DB
|
||||
// CREATE OR REPLACE FUNCTION notify_new_log() RETURNS trigger AS $$
|
||||
// BEGIN
|
||||
// PERFORM pg_notify('new_log', row_to_json(NEW)::text);
|
||||
// RETURN NEW;
|
||||
// END;
|
||||
// $$ LANGUAGE plpgsql;
|
||||
|
||||
// DROP TRIGGER IF EXISTS new_log_trigger ON logs;
|
||||
|
||||
// CREATE TRIGGER new_log_trigger
|
||||
// AFTER INSERT ON logs
|
||||
// FOR EACH ROW EXECUTE FUNCTION notify_new_log();
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/lib/pq"
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
func LogServices(broadcaster chan logger.Message, log *logger.CustomLogger) {
|
||||
|
||||
log.Info("[LogServices] started - single channel for all logs", "websocket", map[string]interface{}{})
|
||||
|
||||
dsn := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=disable",
|
||||
os.Getenv("DB_HOST"),
|
||||
os.Getenv("DB_PORT"),
|
||||
os.Getenv("DB_USER"),
|
||||
os.Getenv("DB_PASSWORD"),
|
||||
os.Getenv("DB_NAME"),
|
||||
)
|
||||
|
||||
listener := pq.NewListener(dsn, 10*time.Second, time.Minute, nil)
|
||||
err := listener.Listen("new_log")
|
||||
if err != nil {
|
||||
log.Panic("Failed to LISTEN on new_log", "logger", map[string]interface{}{
|
||||
"error": err.Error(),
|
||||
})
|
||||
}
|
||||
|
||||
log.Info("Listening for all logs through single logServices channel...", "wbsocker", map[string]interface{}{})
|
||||
for {
|
||||
select {
|
||||
case notify := <-listener.Notify:
|
||||
if notify != nil {
|
||||
var logData map[string]interface{}
|
||||
if err := json.Unmarshal([]byte(notify.Extra), &logData); err != nil {
|
||||
log.Error("Failed to unmarshal notification payload", "logger", map[string]interface{}{
|
||||
"error": err.Error(),
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
// Always send to logServices channel
|
||||
broadcaster <- logger.Message{
|
||||
Channel: "logServices",
|
||||
Data: logData,
|
||||
Meta: map[string]interface{}{
|
||||
"level": logData["level"],
|
||||
"service": logData["service"],
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
case <-time.After(90 * time.Second):
|
||||
go func() {
|
||||
listener.Ping()
|
||||
}()
|
||||
}
|
||||
}
|
||||
}
|
||||
56
backend/internal/notifications/ws/ws_routes.go
Normal file
56
backend/internal/notifications/ws/ws_routes.go
Normal file
@@ -0,0 +1,56 @@
|
||||
package ws
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"gorm.io/gorm"
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
var (
|
||||
broadcaster = make(chan logger.Message)
|
||||
)
|
||||
|
||||
func RegisterSocketRoutes(r *gin.Engine, base_url string, log *logger.CustomLogger, db *gorm.DB) {
|
||||
// Initialize all channels
|
||||
InitializeChannels()
|
||||
|
||||
// Start channel processors
|
||||
StartAllChannels()
|
||||
|
||||
// Start background services
|
||||
go LogServices(broadcaster, log)
|
||||
go StartBroadcasting(broadcaster, channels)
|
||||
|
||||
// WebSocket route
|
||||
r.GET(base_url+"/ws", func(c *gin.Context) {
|
||||
SocketHandler(c, channels, log, db)
|
||||
})
|
||||
|
||||
r.GET(base_url+"/ws/clients", AdminAuthMiddleware(), handleGetClients)
|
||||
}
|
||||
|
||||
func handleGetClients(c *gin.Context) {
|
||||
channel := c.Query("channel")
|
||||
|
||||
var clientList []*Client
|
||||
if channel != "" {
|
||||
clientList = GetClientsByChannel(channel)
|
||||
} else {
|
||||
clientList = GetAllClients()
|
||||
}
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"count": len(clientList),
|
||||
"clients": clientList,
|
||||
})
|
||||
}
|
||||
|
||||
// AdminAuthMiddleware is intended to guard admin-only routes.
// NOTE(review): currently a pass-through — it performs NO authentication and
// lets every request proceed. Implement a real check (API key / JWT) before
// exposing /ws/clients or other admin endpoints.
func AdminAuthMiddleware() gin.HandlerFunc {
	return func(c *gin.Context) {
		// Implement your admin authentication logic
		// Example: Check API key or JWT token
		c.Next()
	}
}
|
||||
41
backend/internal/router/middleware/settings_Check.go
Normal file
41
backend/internal/router/middleware/settings_Check.go
Normal file
@@ -0,0 +1,41 @@
|
||||
package middleware
|
||||
|
||||
import (
|
||||
"github.com/gin-gonic/gin"
|
||||
"lst.net/internal/system/settings"
|
||||
)
|
||||
|
||||
func SettingCheckMiddleware(settingName string) gin.HandlerFunc {
|
||||
return func(c *gin.Context) {
|
||||
// Debug: Log the setting name we're checking
|
||||
//log.Printf("Checking setting '%s' for path: %s", settingName, c.Request.URL.Path)
|
||||
|
||||
// Get the current setting value
|
||||
value, err := settings.GetString(settingName)
|
||||
if err != nil {
|
||||
//log.Printf("Error getting setting '%s': %v", settingName, err)
|
||||
c.AbortWithStatusJSON(404, gin.H{
|
||||
"error": "endpoint not available",
|
||||
"details": "setting error",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Debug: Log the actual value received
|
||||
//log.Printf("Setting '%s' value: '%s'", settingName, value)
|
||||
|
||||
// Changed condition to check for "1" (enable) instead of "0" (disable)
|
||||
if value != "1" {
|
||||
//log.Printf("Setting '%s' not enabled (value: '%s')", settingName, value)
|
||||
c.AbortWithStatusJSON(404, gin.H{
|
||||
"error": "endpoint not available",
|
||||
"details": "required feature is disabled",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Debug: Log successful check
|
||||
//log.Printf("Setting check passed for '%s'", settingName)
|
||||
c.Next()
|
||||
}
|
||||
}
|
||||
66
backend/internal/router/router.go
Normal file
66
backend/internal/router/router.go
Normal file
@@ -0,0 +1,66 @@
|
||||
package router
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"os"
|
||||
|
||||
"github.com/gin-contrib/cors"
|
||||
"github.com/gin-gonic/gin"
|
||||
"gorm.io/gorm"
|
||||
"lst.net/internal/notifications/ws"
|
||||
"lst.net/internal/router/middleware"
|
||||
"lst.net/internal/system/servers"
|
||||
"lst.net/internal/system/settings"
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
func Setup(db *gorm.DB, basePath string, log *logger.CustomLogger) *gin.Engine {
|
||||
|
||||
r := gin.Default()
|
||||
|
||||
if os.Getenv("APP_ENV") == "production" {
|
||||
gin.SetMode(gin.ReleaseMode)
|
||||
}
|
||||
|
||||
// Enable CORS (adjust origins as needed)
|
||||
r.Use(cors.New(cors.Config{
|
||||
AllowOrigins: []string{"*"}, // Allow all origins (change in production)
|
||||
AllowMethods: []string{"GET", "OPTIONS", "POST", "DELETE", "PATCH", "CONNECT"},
|
||||
AllowHeaders: []string{"Origin", "Cache-Control", "Content-Type"},
|
||||
ExposeHeaders: []string{"Content-Length"},
|
||||
AllowCredentials: true,
|
||||
AllowWebSockets: true,
|
||||
}))
|
||||
|
||||
// Serve Docusaurus static files
|
||||
r.StaticFS(basePath+"/docs", http.Dir("docs"))
|
||||
r.StaticFS(basePath+"/app", http.Dir("frontend"))
|
||||
|
||||
// all routes to there respective systems.
|
||||
ws.RegisterSocketRoutes(r, basePath, log, db)
|
||||
settings.RegisterSettingsRoutes(r, basePath, log, db)
|
||||
servers.RegisterServersRoutes(r, basePath, log, db)
|
||||
|
||||
r.GET(basePath+"/api/ping", middleware.SettingCheckMiddleware("testingApiFunction"), func(c *gin.Context) {
|
||||
log.Info("Checking if the server is up", "system", map[string]interface{}{
|
||||
"endpoint": "/api/ping",
|
||||
"client_ip": c.ClientIP(),
|
||||
"user_agent": c.Request.UserAgent(),
|
||||
})
|
||||
c.JSON(200, gin.H{"message": "pong"})
|
||||
})
|
||||
|
||||
r.Any(basePath+"/", func(c *gin.Context) { errorApiLoc(c, log) })
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
func errorApiLoc(c *gin.Context, log *logger.CustomLogger) {
|
||||
|
||||
log.Error("Api endpoint hit that dose not exist", "system", map[string]interface{}{
|
||||
"endpoint": c.Request.URL.Path,
|
||||
"client_ip": c.ClientIP(),
|
||||
"user_agent": c.Request.UserAgent(),
|
||||
})
|
||||
c.JSON(http.StatusBadRequest, gin.H{"message": "looks like you have encountered a route that dose not exist"})
|
||||
}
|
||||
65
backend/internal/system/servers/get_servers.go
Normal file
65
backend/internal/system/servers/get_servers.go
Normal file
@@ -0,0 +1,65 @@
|
||||
package servers
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"gorm.io/gorm"
|
||||
"lst.net/internal/models"
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
func getServers(c *gin.Context, log *logger.CustomLogger, db *gorm.DB) {
|
||||
|
||||
servers, err := GetServers(log, db)
|
||||
log.Info("Current Settings", "system", map[string]interface{}{
|
||||
"endpoint": "/api/v1/settings",
|
||||
"client_ip": c.ClientIP(),
|
||||
"user_agent": c.Request.UserAgent(),
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
|
||||
log.Error("Current Settings", "system", map[string]interface{}{
|
||||
"endpoint": "/api/v1/settings",
|
||||
"client_ip": c.ClientIP(),
|
||||
"user_agent": c.Request.UserAgent(),
|
||||
"error": err,
|
||||
})
|
||||
c.JSON(500, gin.H{"message": "There was an error getting the settings", "error": err})
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(200, gin.H{"message": "Current settings", "data": servers})
|
||||
}
|
||||
|
||||
func GetServers(log *logger.CustomLogger, db *gorm.DB) ([]map[string]interface{}, error) {
|
||||
var servers []models.Servers
|
||||
res := db.Find(&servers)
|
||||
|
||||
if res.Error != nil {
|
||||
return nil, res.Error
|
||||
}
|
||||
|
||||
toLowercase := func(s models.Servers) map[string]interface{} {
|
||||
t := reflect.TypeOf(s)
|
||||
v := reflect.ValueOf(s)
|
||||
|
||||
data := make(map[string]interface{})
|
||||
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
field := strings.ToLower(t.Field(i).Name)
|
||||
data[field] = v.Field(i).Interface()
|
||||
}
|
||||
|
||||
return data
|
||||
}
|
||||
|
||||
var lowercaseServers []map[string]interface{}
|
||||
for _, server := range servers {
|
||||
lowercaseServers = append(lowercaseServers, toLowercase(server))
|
||||
}
|
||||
|
||||
return lowercaseServers, nil
|
||||
}
|
||||
21
backend/internal/system/servers/new_server.go
Normal file
21
backend/internal/system/servers/new_server.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package servers
|
||||
|
||||
import (
|
||||
"gorm.io/gorm"
|
||||
"lst.net/internal/models"
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
func NewServer(serverData models.Servers, log *logger.CustomLogger, db *gorm.DB) (string, error) {
|
||||
|
||||
err := db.Create(&serverData).Error
|
||||
|
||||
if err != nil {
|
||||
log.Error("There was an error adding the new server", "server", map[string]interface{}{
|
||||
"error": err,
|
||||
})
|
||||
return "There was an error adding the new server", err
|
||||
}
|
||||
|
||||
return "New server was just created", nil
|
||||
}
|
||||
13
backend/internal/system/servers/servers.go
Normal file
13
backend/internal/system/servers/servers.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package servers
|
||||
|
||||
import (
|
||||
"github.com/gin-gonic/gin"
|
||||
"gorm.io/gorm"
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
func RegisterServersRoutes(l *gin.Engine, baseUrl string, log *logger.CustomLogger, db *gorm.DB) {
|
||||
|
||||
s := l.Group(baseUrl + "/api/v1")
|
||||
s.GET("/servers", func(c *gin.Context) { getServers(c, log, db) })
|
||||
}
|
||||
59
backend/internal/system/servers/update_server.go
Normal file
59
backend/internal/system/servers/update_server.go
Normal file
@@ -0,0 +1,59 @@
|
||||
package servers
|
||||
|
||||
// import (
|
||||
// "encoding/json"
|
||||
|
||||
// "github.com/gin-gonic/gin"
|
||||
// "lst.net/internal/db"
|
||||
// "lst.net/pkg/logger"
|
||||
// )
|
||||
|
||||
// func updateSettingById(c *gin.Context) {
|
||||
// log := logger.New()
|
||||
// settingID := c.Param("id")
|
||||
|
||||
// if settingID == "" {
|
||||
// c.JSON(500, gin.H{"message": "Invalid data"})
|
||||
// log.Error("Invalid data", "system", map[string]interface{}{
|
||||
// "endpoint": "/api/v1/settings",
|
||||
// "client_ip": c.ClientIP(),
|
||||
// "user_agent": c.Request.UserAgent(),
|
||||
// })
|
||||
// return
|
||||
// }
|
||||
// var setting SettingUpdateInput
|
||||
|
||||
// //err := c.ShouldBindBodyWithJSON(&setting)
|
||||
|
||||
// decoder := json.NewDecoder(c.Request.Body) // more strict and will force us to have correct data
|
||||
// decoder.DisallowUnknownFields()
|
||||
|
||||
// if err := decoder.Decode(&setting); err != nil {
|
||||
// c.JSON(400, gin.H{"message": "Invalid request body", "error": err.Error()})
|
||||
// log.Error("Invalid request body", "system", map[string]interface{}{
|
||||
// "endpoint": "/api/v1/settings",
|
||||
// "client_ip": c.ClientIP(),
|
||||
// "user_agent": c.Request.UserAgent(),
|
||||
// "error": err,
|
||||
// })
|
||||
// return
|
||||
// }
|
||||
|
||||
// if err := UpdateServer(db.DB, settingID, setting); err != nil {
|
||||
// c.JSON(500, gin.H{"message": "Failed to update setting", "error": err.Error()})
|
||||
// log.Error("Failed to update setting", "system", map[string]interface{}{
|
||||
// "endpoint": "/api/v1/settings",
|
||||
// "client_ip": c.ClientIP(),
|
||||
// "user_agent": c.Request.UserAgent(),
|
||||
// "error": err,
|
||||
// })
|
||||
// return
|
||||
// }
|
||||
|
||||
// c.JSON(200, gin.H{"message": "Setting was just updated", "data": setting})
|
||||
|
||||
// }
|
||||
|
||||
// func UpdateServer() (string, error) {
|
||||
// return "Server was just updated", nil
|
||||
// }
|
||||
39
backend/internal/system/settings/get_settings.go
Normal file
39
backend/internal/system/settings/get_settings.go
Normal file
@@ -0,0 +1,39 @@
|
||||
package settings
|
||||
|
||||
import (
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
func GetAllSettings(db *gorm.DB) ([]map[string]interface{}, error) {
|
||||
// var settings []models.Settings
|
||||
// result := db.Find(&settings)
|
||||
|
||||
// if result.Error != nil {
|
||||
// return nil, result.Error
|
||||
// }
|
||||
|
||||
// // Function to convert struct to map with lowercase keys
|
||||
// toLowercase := func(s models.Settings) map[string]interface{} {
|
||||
// t := reflect.TypeOf(s)
|
||||
// v := reflect.ValueOf(s)
|
||||
|
||||
// data := make(map[string]interface{})
|
||||
|
||||
// for i := 0; i < t.NumField(); i++ {
|
||||
// field := strings.ToLower(t.Field(i).Name)
|
||||
// data[field] = v.Field(i).Interface()
|
||||
// }
|
||||
|
||||
// return data
|
||||
// }
|
||||
|
||||
// // Convert each struct in settings slice to a map with lowercase keys
|
||||
// var lowercaseSettings []map[string]interface{}
|
||||
// for _, setting := range settings {
|
||||
// lowercaseSettings = append(lowercaseSettings, toLowercase(setting))
|
||||
// }
|
||||
|
||||
convertedSettings := GetMap()
|
||||
|
||||
return convertedSettings, nil
|
||||
}
|
||||
8
backend/internal/system/settings/inputs.go
Normal file
8
backend/internal/system/settings/inputs.go
Normal file
@@ -0,0 +1,8 @@
|
||||
package settings
|
||||
|
||||
type SettingUpdateInput struct {
|
||||
Description *string `json:"description"`
|
||||
Value *string `json:"value"`
|
||||
Enabled *bool `json:"enabled"`
|
||||
AppService *string `json:"app_service"`
|
||||
}
|
||||
88
backend/internal/system/settings/settings.go
Normal file
88
backend/internal/system/settings/settings.go
Normal file
@@ -0,0 +1,88 @@
|
||||
package settings
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"gorm.io/gorm"
|
||||
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
func RegisterSettingsRoutes(l *gin.Engine, baseUrl string, log *logger.CustomLogger, db *gorm.DB) {
|
||||
// seed the db on start up
|
||||
SeedSettings(db, log)
|
||||
|
||||
s := l.Group(baseUrl + "/api/v1")
|
||||
s.GET("/settings", func(c *gin.Context) { getSettings(c, log, db) })
|
||||
s.PATCH("/settings/:id", func(c *gin.Context) { updateSettingById(c, log, db) })
|
||||
}
|
||||
|
||||
func getSettings(c *gin.Context, log *logger.CustomLogger, db *gorm.DB) {
|
||||
configs, err := GetAllSettings(db)
|
||||
log.Info("Current Settings", "settings", map[string]interface{}{
|
||||
"endpoint": "/api/v1/settings",
|
||||
"client_ip": c.ClientIP(),
|
||||
"user_agent": c.Request.UserAgent(),
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
log := logger.New()
|
||||
log.Error("Current Settings", "settings", map[string]interface{}{
|
||||
"endpoint": "/api/v1/settings",
|
||||
"client_ip": c.ClientIP(),
|
||||
"user_agent": c.Request.UserAgent(),
|
||||
"error": err,
|
||||
})
|
||||
c.JSON(500, gin.H{"message": "There was an error getting the settings", "error": err})
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(200, gin.H{"message": "Current settings", "data": configs})
|
||||
}
|
||||
|
||||
func updateSettingById(c *gin.Context, log *logger.CustomLogger, db *gorm.DB) {
|
||||
|
||||
settingID := c.Param("id")
|
||||
|
||||
if settingID == "" {
|
||||
c.JSON(500, gin.H{"message": "Invalid data"})
|
||||
log.Error("Invalid data", "settings", map[string]interface{}{
|
||||
"endpoint": "/api/v1/settings",
|
||||
"client_ip": c.ClientIP(),
|
||||
"user_agent": c.Request.UserAgent(),
|
||||
})
|
||||
return
|
||||
}
|
||||
var setting SettingUpdateInput
|
||||
|
||||
//err := c.ShouldBindBodyWithJSON(&setting)
|
||||
|
||||
decoder := json.NewDecoder(c.Request.Body) // more strict and will force us to have correct data
|
||||
decoder.DisallowUnknownFields()
|
||||
|
||||
if err := decoder.Decode(&setting); err != nil {
|
||||
c.JSON(400, gin.H{"message": "Invalid request body", "error": err.Error()})
|
||||
log.Error("Invalid request body", "settings", map[string]interface{}{
|
||||
"endpoint": "/api/v1/settings",
|
||||
"client_ip": c.ClientIP(),
|
||||
"user_agent": c.Request.UserAgent(),
|
||||
"error": err,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if err := UpdateSetting(log, db, settingID, setting); err != nil {
|
||||
c.JSON(500, gin.H{"message": "Failed to update setting", "error": err.Error()})
|
||||
log.Error("Failed to update setting", "settings", map[string]interface{}{
|
||||
"endpoint": "/api/v1/settings",
|
||||
"client_ip": c.ClientIP(),
|
||||
"user_agent": c.Request.UserAgent(),
|
||||
"error": err,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(200, gin.H{"message": "Setting was just updated", "data": setting})
|
||||
|
||||
}
|
||||
128
backend/internal/system/settings/settings_seed.go
Normal file
128
backend/internal/system/settings/settings_seed.go
Normal file
@@ -0,0 +1,128 @@
|
||||
package settings
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"gorm.io/gorm"
|
||||
"lst.net/internal/models"
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
var seedConfigData = []models.Settings{
|
||||
{Name: "serverPort", Description: "The port the server will listen on if not running in docker", Value: "4000", Enabled: true, AppService: "server"},
|
||||
{Name: "server", Description: "The server we will use when connecting to the alplaprod sql", Value: "usmcd1vms006", Enabled: true, AppService: "server"},
|
||||
{Name: "timezone", Value: "America/Chicago", Description: "What time zone is the server in this is used for cronjobs and some other time stuff", AppService: "server", Enabled: true},
|
||||
{Name: "dbUser", Value: "alplaprod", Description: "What is the db userName", AppService: "server", Enabled: true},
|
||||
{Name: "dbPass", Value: "b2JlbGl4", Description: "What is the db password", AppService: "server", Enabled: true},
|
||||
{Name: "tcpPort", Value: "2222", Description: "TCP port for printers to connect send data and the zedra cameras", AppService: "server", Enabled: true},
|
||||
{Name: "prolinkCheck", Value: "1", Description: "Will prolink be considered to check if matches, maninly used in plants that do not fully utilize prolink + ocp", AppService: "production", Enabled: true},
|
||||
{Name: "bookin", Value: "1", Description: "do we want to book in after a label is printed", AppService: "ocp", Enabled: true},
|
||||
{Name: "dbServer", Value: "usmcd1vms036", Description: "What server is the prod db on?", AppService: "server", Enabled: true},
|
||||
{Name: "printDelay", Value: "90", Description: "How long in seconds between prints", AppService: "ocp", Enabled: true},
|
||||
{Name: "plantToken", Value: "test3", Description: "What is the plant token", AppService: "server", Enabled: true},
|
||||
{Name: "dualPrinting", Value: "0", Description: "Dose the plant have 2 machines that go to 1?", AppService: "ocp", Enabled: true},
|
||||
{Name: "ocmeService", Value: "0", Description: "Is the ocme service enabled. this is gernerally only for Dayton.", AppService: "ocme", Enabled: true},
|
||||
{Name: "fifoCheck", Value: "45", Description: "How far back do we want to check for fifo default 45, putting 0 will ignore.", AppService: "ocme", Enabled: true},
|
||||
{Name: "dayCheck", Value: "3", Description: "how many days +/- to check for shipments in alplaprod", AppService: "ocme", Enabled: true},
|
||||
{Name: "maxLotPerTruck", Value: "3", Description: "How mant lots can we have per truck?", AppService: "ocme", Enabled: true},
|
||||
{Name: "monitorAddress", Value: "8", Description: "What address is monitored to be limited to the amount of lots that can be added to a truck.", AppService: "ocme", Enabled: true},
|
||||
{Name: "ocmeCycleCount", Value: "1", Description: "Are we allowing ocme cycle counts?", AppService: "ocme", Enabled: true},
|
||||
{Name: "devDir", Value: "", Description: "This is the dev dir and strictly only for updating the servers.", AppService: "server", Enabled: true},
|
||||
{Name: "demandMGTActivated", Value: "0", Description: "Do we allow for new fake edi?", AppService: "logistics", Enabled: true},
|
||||
{Name: "qualityRequest", Value: "0", Description: "quality request module?", AppService: "quality", Enabled: true},
|
||||
{Name: "ocpLogsCheck", Value: "4", Description: "How long do we want to allow logs to show that have not been cleared?", AppService: "ocp", Enabled: true},
|
||||
{Name: "inhouseDelivery", Value: "0", Description: "Are we doing auto inhouse delivery?", AppService: "ocp", Enabled: true},
|
||||
// dyco settings
|
||||
{Name: "dycoConnect", Value: "0", Description: "Are we running the dyco system?", AppService: "dycp", Enabled: true},
|
||||
{Name: "dycoPrint", Value: "0", Description: "Are we using the dyco to get the labels or the rfid?", AppService: "dyco", Enabled: true},
|
||||
{Name: "strapperCheck", Value: "1", Description: "Are we monitoring the strapper for faults?", AppService: "dyco", Enabled: true},
|
||||
// ocp
|
||||
{Name: "ocpActive", Value: `1`, Description: "Are we pritning on demand?", AppService: "ocp", Enabled: true},
|
||||
{Name: "ocpCycleDelay", Value: `10`, Description: "How long between printer cycles do we want to monitor.", AppService: "ocp", Enabled: true},
|
||||
{Name: "pNgAddress", Value: `139`, Description: "What is the address for p&g so we can make sure we have the correct fake edi forcast going in.", AppService: "logisitcs", Enabled: true},
|
||||
{Name: "scannerID", Value: `500`, Description: "What scanner id will we be using for the app", AppService: "logistics", Enabled: true},
|
||||
{Name: "scannerPort", Value: `50002`, Description: "What port instance will we be using?", AppService: "logistics", Enabled: true},
|
||||
{Name: "stagingReturnLocations", Value: `30125,31523`, Description: "What are the staging location IDs we will use to select from. seperated by commas", AppService: "logistics", Enabled: true},
|
||||
{Name: "testingApiFunction", Value: `1`, Description: "This is a test to validate if we set to 0 it will actaully not allow the route", AppService: "logistics", Enabled: true},
|
||||
}
|
||||
|
||||
func SeedSettings(db *gorm.DB, log *logger.CustomLogger) error {
|
||||
|
||||
for _, cfg := range seedConfigData {
|
||||
var existing models.Settings
|
||||
if err := db.Unscoped().Where("name = ?", cfg.Name).First(&existing).Error; err == nil {
|
||||
|
||||
if existing.DeletedAt.Valid {
|
||||
// Undelete by setting DeletedAt to NULL
|
||||
if err := db.Unscoped().Model(&existing).Update("DeletedAt", gorm.DeletedAt{}).Error; err != nil {
|
||||
log.Error("Failed to undelete settings", "settings", map[string]interface{}{
|
||||
"name": cfg.Name,
|
||||
"error": err,
|
||||
})
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if errors.Is(err, gorm.ErrRecordNotFound) {
|
||||
if err := db.Create(&cfg).Error; err != nil {
|
||||
log.Error("Failed to seed settings", "settings", map[string]interface{}{
|
||||
"name": cfg.Name,
|
||||
"error": err,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// // Try to find config by unique Name
|
||||
// result := db.Where("Name =?", cfg.Name).First(&existing)
|
||||
|
||||
// if result.Error != nil {
|
||||
// if result.Error == gorm.ErrRecordNotFound && cfg.Enabled {
|
||||
// // not here lets add it
|
||||
|
||||
// if err := db.Create(&cfg).Error; err != nil && !existing.DeletedAt.Valid {
|
||||
// log.Error("Failed to seed settings", "settings", map[string]interface{}{
|
||||
// "name": cfg.Name,
|
||||
// "error": err,
|
||||
// })
|
||||
// }
|
||||
|
||||
// //log.Printf("Seeded new config: %s", cfg.Name)
|
||||
// } else {
|
||||
// // Some other error
|
||||
// return result.Error
|
||||
// }
|
||||
} else {
|
||||
// remove the setting if we change to false this will help with future proofing our seeder in the event we need to add it back
|
||||
if cfg.Enabled {
|
||||
existing.Description = cfg.Description
|
||||
existing.Name = cfg.Name
|
||||
existing.AppService = cfg.AppService
|
||||
if err := db.Save(&existing).Error; err != nil {
|
||||
log.Error("Failed to update ettings.", "settings", map[string]interface{}{
|
||||
"name": cfg.Name,
|
||||
"error": err,
|
||||
})
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
// we delete the setting so its no longer there
|
||||
if err := db.Delete(&existing).Error; err != nil {
|
||||
log.Error("Failed to delete ettings.", "settings", map[string]interface{}{
|
||||
"name": cfg.Name,
|
||||
"error": err,
|
||||
})
|
||||
return err
|
||||
}
|
||||
settingDelete := fmt.Sprintf("Updated existing config: %s", cfg.Name)
|
||||
log.Info(settingDelete, "settings", map[string]interface{}{})
|
||||
}
|
||||
|
||||
//log.Printf("Updated existing config: %s", cfg.Name)
|
||||
}
|
||||
}
|
||||
|
||||
log.Info("All settings added or updated.", "settings", map[string]interface{}{})
|
||||
|
||||
return nil
|
||||
}
|
||||
110
backend/internal/system/settings/settings_states.go
Normal file
110
backend/internal/system/settings/settings_states.go
Normal file
@@ -0,0 +1,110 @@
|
||||
package settings
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"gorm.io/gorm"
|
||||
"lst.net/internal/models"
|
||||
)
|
||||
|
||||
var (
|
||||
// Global state
|
||||
appSettings []models.Settings
|
||||
appSettingsLock sync.RWMutex
|
||||
dbInstance *gorm.DB
|
||||
)
|
||||
|
||||
// Initialize loads settings into memory at startup
|
||||
func Initialize(db *gorm.DB) error {
|
||||
dbInstance = db
|
||||
return Refresh()
|
||||
}
|
||||
|
||||
// Refresh reloads settings from DB (call after updates)
|
||||
func Refresh() error {
|
||||
appSettingsLock.Lock()
|
||||
defer appSettingsLock.Unlock()
|
||||
|
||||
var settings []models.Settings
|
||||
if err := dbInstance.Find(&settings).Error; err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
appSettings = settings
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetAll returns a thread-safe copy of settings
|
||||
func GetAll() []models.Settings {
|
||||
appSettingsLock.RLock()
|
||||
defer appSettingsLock.RUnlock()
|
||||
|
||||
// Return copy to prevent external modification
|
||||
copied := make([]models.Settings, len(appSettings))
|
||||
copy(copied, appSettings)
|
||||
return copied
|
||||
}
|
||||
|
||||
// GetMap returns settings as []map[string]interface{}
|
||||
func GetMap() []map[string]interface{} {
|
||||
return convertToMap(GetAll())
|
||||
}
|
||||
|
||||
// convertToMap helper (move your existing conversion logic here)
|
||||
func convertToMap(settings []models.Settings) []map[string]interface{} {
|
||||
toLowercase := func(s models.Settings) map[string]interface{} {
|
||||
t := reflect.TypeOf(s)
|
||||
v := reflect.ValueOf(s)
|
||||
|
||||
data := make(map[string]interface{})
|
||||
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
field := strings.ToLower(t.Field(i).Name)
|
||||
data[field] = v.Field(i).Interface()
|
||||
}
|
||||
|
||||
return data
|
||||
}
|
||||
|
||||
// Convert each struct in settings slice to a map with lowercase keys
|
||||
var lowercaseSettings []map[string]interface{}
|
||||
for _, setting := range settings {
|
||||
lowercaseSettings = append(lowercaseSettings, toLowercase(setting))
|
||||
}
|
||||
|
||||
return lowercaseSettings
|
||||
}
|
||||
|
||||
func GetString(name string) (string, error) {
|
||||
appSettingsLock.RLock()
|
||||
defer appSettingsLock.RUnlock()
|
||||
|
||||
for _, s := range appSettings {
|
||||
if s.Name == name { // assuming your model has a "Name" field
|
||||
fmt.Println(s.Value)
|
||||
return s.Value, nil // assuming your model has a "Value" field
|
||||
}
|
||||
}
|
||||
return "", errors.New("setting not found")
|
||||
}
|
||||
|
||||
func SetTemp(name, value string) {
|
||||
appSettingsLock.Lock()
|
||||
defer appSettingsLock.Unlock()
|
||||
|
||||
for i, s := range appSettings {
|
||||
if s.Name == name {
|
||||
appSettings[i].Value = value
|
||||
return
|
||||
}
|
||||
}
|
||||
// If not found, add new setting
|
||||
appSettings = append(appSettings, models.Settings{
|
||||
Name: name,
|
||||
Value: value,
|
||||
})
|
||||
}
|
||||
56
backend/internal/system/settings/update_setting.go
Normal file
56
backend/internal/system/settings/update_setting.go
Normal file
@@ -0,0 +1,56 @@
|
||||
package settings
|
||||
|
||||
import (
|
||||
"gorm.io/gorm"
|
||||
"lst.net/internal/models"
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
func UpdateSetting(log *logger.CustomLogger, db *gorm.DB, id string, input SettingUpdateInput) error {
|
||||
var cfg models.Settings
|
||||
if err := db.Where("setting_id =?", id).First(&cfg).Error; err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
updates := map[string]interface{}{}
|
||||
|
||||
if input.Description != nil {
|
||||
updates["description"] = *input.Description
|
||||
}
|
||||
if input.Value != nil {
|
||||
updates["value"] = *input.Value
|
||||
}
|
||||
if input.Enabled != nil {
|
||||
updates["enabled"] = *input.Enabled
|
||||
}
|
||||
if input.AppService != nil {
|
||||
updates["app_service"] = *input.AppService
|
||||
}
|
||||
|
||||
if len(updates) == 0 {
|
||||
return nil // nothing to update
|
||||
}
|
||||
|
||||
settingUpdate := db.Model(&cfg).Updates(updates)
|
||||
|
||||
if settingUpdate.Error != nil {
|
||||
log.Error("There was an error updating the setting", "settings", map[string]interface{}{
|
||||
"error": settingUpdate.Error,
|
||||
})
|
||||
return settingUpdate.Error
|
||||
}
|
||||
|
||||
if err := Refresh(); err != nil {
|
||||
log.Error("There was an error refreshing the settings after a setting update", "settings", map[string]interface{}{
|
||||
"error": err,
|
||||
})
|
||||
}
|
||||
|
||||
log.Info("The setting was just updated", "settings", map[string]interface{}{
|
||||
"id": id,
|
||||
"name": cfg.Name,
|
||||
"updated": updates,
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,32 +1,79 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/joho/godotenv"
|
||||
|
||||
"lst.net/internal/db"
|
||||
"lst.net/internal/router"
|
||||
"lst.net/internal/system/settings"
|
||||
"lst.net/pkg/logger"
|
||||
)
|
||||
|
||||
func main() {
|
||||
fmt.Println("Welcome to lst backend where all the fun happens.")
|
||||
r := gin.Default()
|
||||
log := logger.New()
|
||||
if os.Getenv("RUNNING_IN_DOCKER") != "true" {
|
||||
err := godotenv.Load("../.env")
|
||||
if err != nil {
|
||||
log := logger.New()
|
||||
log.Info("Warning: .env file not found (ok in Docker/production)", "system", map[string]interface{}{})
|
||||
}
|
||||
}
|
||||
|
||||
// Serve Docusaurus static files
|
||||
r.StaticFS("/lst/docs", http.Dir("docs"))
|
||||
// Initialize DB
|
||||
if _, err := db.InitDB(); err != nil {
|
||||
|
||||
log.Panic("Database intialize failed, please check the server asap.", "db", map[string]interface{}{
|
||||
"error": err.Error(),
|
||||
"cause": errors.Unwrap(err),
|
||||
"timeout": "30s",
|
||||
"details": fmt.Sprintf("%+v", err), // Full stack trace if available
|
||||
})
|
||||
}
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
|
||||
r.GET("/api/ping", func(c *gin.Context) {
|
||||
c.JSON(200, gin.H{"message": "pong"})
|
||||
})
|
||||
sqlDB, _ := db.DB.DB()
|
||||
sqlDB.Close()
|
||||
log.Error("Recovered from panic during DB shutdown", "db", map[string]interface{}{
|
||||
"panic": r,
|
||||
})
|
||||
}
|
||||
}()
|
||||
|
||||
r.Any("/api", errorApiLoc)
|
||||
r.Any("/", errorLoc)
|
||||
r.Run(":8080")
|
||||
}
|
||||
|
||||
func errorLoc(c *gin.Context) {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"message": "welcome to lst system you might have just encountered an incorrect area of the app"})
|
||||
}
|
||||
func errorApiLoc(c *gin.Context) {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"message": "looks like you have encountered an api route that dose not exist"})
|
||||
if err := settings.Initialize(db.DB); err != nil {
|
||||
log.Panic("There was an error intilizing the settings", "settings", map[string]interface{}{
|
||||
"error": err,
|
||||
})
|
||||
}
|
||||
|
||||
// long lived process like ocp running all the time should go here and base the db struct over.
|
||||
// go ocp.MonitorPrinters
|
||||
// go notifcations.Processor
|
||||
|
||||
// Set basePath dynamically
|
||||
basePath := "/"
|
||||
|
||||
if os.Getenv("APP_ENV") != "production" {
|
||||
basePath = "/lst" // Dev only
|
||||
}
|
||||
|
||||
log.Info("Welcome to lst backend where all the fun happens.", "system", map[string]interface{}{})
|
||||
// Init Gin router and pass DB to services
|
||||
r := router.Setup(db.DB, basePath, log)
|
||||
|
||||
// get the server port
|
||||
port := "8080"
|
||||
if os.Getenv("VITE_SERVER_PORT") != "" {
|
||||
port = os.Getenv("VITE_SERVER_PORT")
|
||||
}
|
||||
|
||||
if err := r.Run(":" + port); err != nil {
|
||||
log.Panic("Server failed to start", "system", map[string]interface{}{
|
||||
"error": err,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
3
backend/pkg/json.go
Normal file
3
backend/pkg/json.go
Normal file
@@ -0,0 +1,3 @@
|
||||
package pkg
|
||||
|
||||
type JSONB map[string]interface{}
|
||||
18
backend/pkg/logger/create_log.go
Normal file
18
backend/pkg/logger/create_log.go
Normal file
@@ -0,0 +1,18 @@
|
||||
package logger
|
||||
|
||||
import (
|
||||
"lst.net/internal/db"
|
||||
"lst.net/internal/models"
|
||||
"lst.net/pkg"
|
||||
)
|
||||
|
||||
// CreateLog inserts a new log entry.
|
||||
func CreateLog(level, message, service string, metadata pkg.JSONB) error {
|
||||
log := models.Log{
|
||||
Level: level,
|
||||
Message: message,
|
||||
Service: service,
|
||||
Metadata: metadata,
|
||||
}
|
||||
return db.DB.Create(&log).Error
|
||||
}
|
||||
77
backend/pkg/logger/discord.go
Normal file
77
backend/pkg/logger/discord.go
Normal file
@@ -0,0 +1,77 @@
|
||||
package logger
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
discordwebhook "github.com/bensch777/discord-webhook-golang"
|
||||
)
|
||||
|
||||
func CreateDiscordMsg(message string) {
|
||||
// we will only run the discord bot if we actaully put a url in the.
|
||||
if os.Getenv("WEBHOOK") != "" {
|
||||
var webhookurl = os.Getenv("WEBHOOK")
|
||||
host, _ := os.Hostname()
|
||||
embed := discordwebhook.Embed{
|
||||
Title: "A new crash report from lst.",
|
||||
Color: 15277667,
|
||||
Url: "https://avatars.githubusercontent.com/u/6016509?s=48&v=4",
|
||||
Timestamp: time.Now(),
|
||||
// Thumbnail: discordwebhook.Thumbnail{
|
||||
// Url: "https://avatars.githubusercontent.com/u/6016509?s=48&v=4",
|
||||
// },
|
||||
// Author: discordwebhook.Author{
|
||||
// Name: "Author Name",
|
||||
// Icon_URL: "https://avatars.githubusercontent.com/u/6016509?s=48&v=4",
|
||||
// },
|
||||
Fields: []discordwebhook.Field{
|
||||
discordwebhook.Field{
|
||||
Name: host,
|
||||
Value: message,
|
||||
Inline: false,
|
||||
},
|
||||
// discordwebhook.Field{
|
||||
// Name: "Error reason",
|
||||
// Value: stack,
|
||||
// Inline: false,
|
||||
// },
|
||||
// discordwebhook.Field{
|
||||
// Name: "Field 3",
|
||||
// Value: "Field Value 3",
|
||||
// Inline: false,
|
||||
// },
|
||||
},
|
||||
// Footer: discordwebhook.Footer{
|
||||
// Text: "Footer Text",
|
||||
// Icon_url: "https://avatars.githubusercontent.com/u/6016509?s=48&v=4",
|
||||
// },
|
||||
}
|
||||
|
||||
SendEmbed(webhookurl, embed)
|
||||
} else {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func SendEmbed(link string, embeds discordwebhook.Embed) error {
|
||||
logging := New()
|
||||
logging.Info("new messege being posted to discord", "logger", map[string]interface{}{
|
||||
"message": "Message",
|
||||
})
|
||||
hook := discordwebhook.Hook{
|
||||
Username: "Captain Hook",
|
||||
Avatar_url: "https://avatars.githubusercontent.com/u/6016509?s=48&v=4",
|
||||
Content: "Message",
|
||||
Embeds: []discordwebhook.Embed{embeds},
|
||||
}
|
||||
|
||||
payload, err := json.Marshal(hook)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
err = discordwebhook.ExecuteWebhook(link, payload)
|
||||
return err
|
||||
|
||||
}
|
||||
117
backend/pkg/logger/logger.go
Normal file
117
backend/pkg/logger/logger.go
Normal file
@@ -0,0 +1,117 @@
|
||||
package logger
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/rs/zerolog"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
type CustomLogger struct {
|
||||
consoleLogger zerolog.Logger
|
||||
}
|
||||
|
||||
type Message struct {
|
||||
Channel string `json:"channel"`
|
||||
Data map[string]interface{} `json:"data"`
|
||||
Meta map[string]interface{} `json:"meta,omitempty"`
|
||||
}
|
||||
|
||||
// New creates a configured CustomLogger.
|
||||
func New() *CustomLogger {
|
||||
// Colorized console output
|
||||
consoleWriter := zerolog.ConsoleWriter{
|
||||
Out: os.Stderr,
|
||||
TimeFormat: "2006-01-02 15:04:05",
|
||||
}
|
||||
|
||||
return &CustomLogger{
|
||||
consoleLogger: zerolog.New(consoleWriter).
|
||||
With().
|
||||
Timestamp().
|
||||
Logger(),
|
||||
}
|
||||
}
|
||||
|
||||
func PrettyFormat(level, message string, metadata map[string]interface{}) string {
|
||||
timestamp := time.Now().Format("2006-01-02 15:04:05")
|
||||
base := fmt.Sprintf("[%s] %s| Message: %s", strings.ToUpper(level), timestamp, message)
|
||||
|
||||
if len(metadata) > 0 {
|
||||
metaJSON, _ := json.Marshal(metadata)
|
||||
return fmt.Sprintf("%s | Metadata: %s", base, string(metaJSON))
|
||||
}
|
||||
return base
|
||||
}
|
||||
|
||||
func (l *CustomLogger) logToPostgres(level, message, service string, metadata map[string]interface{}) {
|
||||
err := CreateLog(level, message, service, metadata)
|
||||
if err != nil {
|
||||
// Fallback to console if DB fails
|
||||
log.Error().Err(err).Msg("Failed to write log to PostgreSQL")
|
||||
}
|
||||
}
|
||||
|
||||
// --- Level-Specific Methods ---
|
||||
|
||||
func (l *CustomLogger) Info(message, service string, fields map[string]interface{}) {
|
||||
l.consoleLogger.Info().Fields(fields).Msg(message)
|
||||
l.logToPostgres("info", message, service, fields)
|
||||
|
||||
//PostLog(PrettyFormat("info", message, fields)) // Broadcast pretty message
|
||||
}
|
||||
|
||||
func (l *CustomLogger) Warn(message, service string, fields map[string]interface{}) {
|
||||
l.consoleLogger.Error().Fields(fields).Msg(message)
|
||||
l.logToPostgres("warn", message, service, fields)
|
||||
|
||||
//PostLog(PrettyFormat("warn", message, fields)) // Broadcast pretty message
|
||||
|
||||
// Custom logic for errors (e.g., alerting)
|
||||
if len(fields) > 0 {
|
||||
l.consoleLogger.Warn().Msg("Additional error context captured")
|
||||
}
|
||||
}
|
||||
|
||||
func (l *CustomLogger) Error(message, service string, fields map[string]interface{}) {
|
||||
l.consoleLogger.Error().Fields(fields).Msg(message)
|
||||
l.logToPostgres("error", message, service, fields)
|
||||
|
||||
//PostLog(PrettyFormat("error", message, fields)) // Broadcast pretty message
|
||||
|
||||
// Custom logic for errors (e.g., alerting)
|
||||
if len(fields) > 0 {
|
||||
l.consoleLogger.Warn().Msg("Additional error context captured")
|
||||
}
|
||||
}
|
||||
|
||||
func (l *CustomLogger) Panic(message, service string, fields map[string]interface{}) {
|
||||
// Log to console (colored, with fields)
|
||||
l.consoleLogger.Error().
|
||||
Str("service", service).
|
||||
Fields(fields).
|
||||
Msg(message + " (PANIC)") // Explicitly mark as panic
|
||||
|
||||
// Log to PostgreSQL (sync to ensure it's saved before crashing)
|
||||
err := CreateLog("panic", message, service, fields) // isCritical=true
|
||||
if err != nil {
|
||||
l.consoleLogger.Error().Err(err).Msg("Failed to save panic log to PostgreSQL")
|
||||
}
|
||||
|
||||
// Additional context (optional)
|
||||
if len(fields) > 0 {
|
||||
l.consoleLogger.Warn().Msg("Additional panic context captured")
|
||||
}
|
||||
|
||||
CreateDiscordMsg(message)
|
||||
panic(message)
|
||||
}
|
||||
|
||||
func (l *CustomLogger) Debug(message, service string, fields map[string]interface{}) {
|
||||
l.consoleLogger.Debug().Fields(fields).Msg(message)
|
||||
l.logToPostgres("debug", message, service, fields)
|
||||
}
|
||||
@@ -1,27 +1,26 @@
|
||||
---
|
||||
services:
|
||||
lst_backend:
|
||||
# build: . # Tell Docker Compose to build the image using the Dockerfile in the current directory
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./backend/Dockerfile
|
||||
image: git.tuffraid.net/cowch/logistics_support_tool:backend-latest
|
||||
container_name: lst_backend # A friendly name for your running container
|
||||
no_cache: true
|
||||
image: git.tuffraid.net/cowch/logistics_support_tool:latest
|
||||
container_name: lst_backend
|
||||
networks:
|
||||
- docker-network
|
||||
environment:
|
||||
DB_HOST: postgres
|
||||
DB_PORT: 5432
|
||||
DB_USER: username
|
||||
DB_PASSWORD: passwordl
|
||||
DB_NAME: lst
|
||||
volumes:
|
||||
- /path/to/backend/data:/data
|
||||
ports:
|
||||
- "8080:8080"
|
||||
restart: unless-stopped
|
||||
|
||||
lst_frontend:
|
||||
# build: . # Tell Docker Compose to build the image using the Dockerfile in the current directory
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./frontend/Dockerfile
|
||||
image: git.tuffraid.net/cowch/logistics_support_tool:frontend-latest
|
||||
container_name: lst_frontend # A friendly name for your running container
|
||||
volumes:
|
||||
- /path/to/frontend/data:/data
|
||||
ports:
|
||||
- "3120:3000"
|
||||
restart: unless-stopped
|
||||
pull_policy: never
|
||||
networks:
|
||||
docker-network:
|
||||
external: true
|
||||
|
||||
24
frontend/.gitignore
vendored
Normal file
24
frontend/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
lerna-debug.log*
|
||||
|
||||
node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
.DS_Store
|
||||
*.suo
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
@@ -1,29 +0,0 @@
|
||||
# Build Stage
|
||||
FROM node:24-alpine AS deps
|
||||
|
||||
WORKDIR /app
|
||||
COPY package*.json ./
|
||||
RUN npm install
|
||||
|
||||
# Build the Next.js app
|
||||
FROM node:24-alpine AS builder
|
||||
WORKDIR /app
|
||||
COPY . ./
|
||||
COPY --from=deps /app/node_modules ./node_modules
|
||||
# Run other commands like prisma or drizzle
|
||||
RUN npm run build
|
||||
|
||||
# if more commands are needed after here do the same
|
||||
|
||||
# Final stage
|
||||
FROM node:24-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=builder /app/.nitro /app/.nitro
|
||||
COPY --from=builder /app/.output /app/.output
|
||||
COPY --from=builder /app/.tanstack /app/.tanstack
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
CMD ["node", ".output/server/index.mjs"]
|
||||
69
frontend/README.md
Normal file
69
frontend/README.md
Normal file
@@ -0,0 +1,69 @@
|
||||
# React + TypeScript + Vite
|
||||
|
||||
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
|
||||
|
||||
Currently, two official plugins are available:
|
||||
|
||||
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) for Fast Refresh
|
||||
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
|
||||
|
||||
## Expanding the ESLint configuration
|
||||
|
||||
If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
|
||||
|
||||
```js
|
||||
export default tseslint.config([
|
||||
globalIgnores(['dist']),
|
||||
{
|
||||
files: ['**/*.{ts,tsx}'],
|
||||
extends: [
|
||||
// Other configs...
|
||||
|
||||
// Remove tseslint.configs.recommended and replace with this
|
||||
...tseslint.configs.recommendedTypeChecked,
|
||||
// Alternatively, use this for stricter rules
|
||||
...tseslint.configs.strictTypeChecked,
|
||||
// Optionally, add this for stylistic rules
|
||||
...tseslint.configs.stylisticTypeChecked,
|
||||
|
||||
// Other configs...
|
||||
],
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
project: ['./tsconfig.node.json', './tsconfig.app.json'],
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
// other options...
|
||||
},
|
||||
},
|
||||
])
|
||||
```
|
||||
|
||||
You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
|
||||
|
||||
```js
|
||||
// eslint.config.js
|
||||
import reactX from 'eslint-plugin-react-x'
|
||||
import reactDom from 'eslint-plugin-react-dom'
|
||||
|
||||
export default tseslint.config([
|
||||
globalIgnores(['dist']),
|
||||
{
|
||||
files: ['**/*.{ts,tsx}'],
|
||||
extends: [
|
||||
// Other configs...
|
||||
// Enable lint rules for React
|
||||
reactX.configs['recommended-typescript'],
|
||||
// Enable lint rules for React DOM
|
||||
reactDom.configs.recommended,
|
||||
],
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
project: ['./tsconfig.node.json', './tsconfig.app.json'],
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
// other options...
|
||||
},
|
||||
},
|
||||
])
|
||||
```
|
||||
23
frontend/eslint.config.js
Normal file
23
frontend/eslint.config.js
Normal file
@@ -0,0 +1,23 @@
|
||||
import js from '@eslint/js'
|
||||
import globals from 'globals'
|
||||
import reactHooks from 'eslint-plugin-react-hooks'
|
||||
import reactRefresh from 'eslint-plugin-react-refresh'
|
||||
import tseslint from 'typescript-eslint'
|
||||
import { globalIgnores } from 'eslint/config'
|
||||
|
||||
export default tseslint.config([
|
||||
globalIgnores(['dist']),
|
||||
{
|
||||
files: ['**/*.{ts,tsx}'],
|
||||
extends: [
|
||||
js.configs.recommended,
|
||||
tseslint.configs.recommended,
|
||||
reactHooks.configs['recommended-latest'],
|
||||
reactRefresh.configs.vite,
|
||||
],
|
||||
languageOptions: {
|
||||
ecmaVersion: 2020,
|
||||
globals: globals.browser,
|
||||
},
|
||||
},
|
||||
])
|
||||
13
frontend/index.html
Normal file
13
frontend/index.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Vite + React + TS</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
9620
frontend/package-lock.json
generated
9620
frontend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,27 +1,29 @@
|
||||
{
|
||||
"name": "frontend",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite dev",
|
||||
"build": "vite build"
|
||||
"dev": "vite",
|
||||
"build": "tsc -b && vite build",
|
||||
"lint": "eslint .",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@tanstack/react-router": "^1.127.3",
|
||||
"@tanstack/react-start": "^1.127.4",
|
||||
"react": "^19.1.0",
|
||||
"react-dom": "^19.1.0",
|
||||
"vite": "^7.0.4"
|
||||
"react-dom": "^19.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.30.1",
|
||||
"@types/react": "^19.1.8",
|
||||
"@types/react-dom": "^19.1.6",
|
||||
"typescript": "^5.8.3",
|
||||
"vite-tsconfig-paths": "^5.1.4"
|
||||
"@vitejs/plugin-react-swc": "^3.10.2",
|
||||
"eslint": "^9.30.1",
|
||||
"eslint-plugin-react-hooks": "^5.2.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.20",
|
||||
"globals": "^16.3.0",
|
||||
"typescript": "~5.8.3",
|
||||
"typescript-eslint": "^8.35.1",
|
||||
"vite": "^7.0.4"
|
||||
}
|
||||
}
|
||||
|
||||
1
frontend/public/vite.svg
Normal file
1
frontend/public/vite.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 1.5 KiB |
42
frontend/src/App.css
Normal file
42
frontend/src/App.css
Normal file
@@ -0,0 +1,42 @@
|
||||
#root {
|
||||
max-width: 1280px;
|
||||
margin: 0 auto;
|
||||
padding: 2rem;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.logo {
|
||||
height: 6em;
|
||||
padding: 1.5em;
|
||||
will-change: filter;
|
||||
transition: filter 300ms;
|
||||
}
|
||||
.logo:hover {
|
||||
filter: drop-shadow(0 0 2em #646cffaa);
|
||||
}
|
||||
.logo.react:hover {
|
||||
filter: drop-shadow(0 0 2em #61dafbaa);
|
||||
}
|
||||
|
||||
@keyframes logo-spin {
|
||||
from {
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
to {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion: no-preference) {
|
||||
a:nth-of-type(2) .logo {
|
||||
animation: logo-spin infinite 20s linear;
|
||||
}
|
||||
}
|
||||
|
||||
.card {
|
||||
padding: 2em;
|
||||
}
|
||||
|
||||
.read-the-docs {
|
||||
color: #888;
|
||||
}
|
||||
41
frontend/src/App.tsx
Normal file
41
frontend/src/App.tsx
Normal file
@@ -0,0 +1,41 @@
|
||||
import { useState } from "react";
|
||||
import reactLogo from "./assets/react.svg";
|
||||
import viteLogo from "/vite.svg";
|
||||
import "./App.css";
|
||||
import WebSocketViewer from "./WebSocketTest";
|
||||
|
||||
function App() {
|
||||
const [count, setCount] = useState(0);
|
||||
|
||||
return (
|
||||
<>
|
||||
<div>
|
||||
<a href="https://vite.dev" target="_blank">
|
||||
<img src={viteLogo} className="logo" alt="Vite logo" />
|
||||
</a>
|
||||
<a href="https://react.dev" target="_blank">
|
||||
<img
|
||||
src={reactLogo}
|
||||
className="logo react"
|
||||
alt="React logo"
|
||||
/>
|
||||
</a>
|
||||
</div>
|
||||
<h1>Vite + React</h1>
|
||||
<div className="card">
|
||||
<button onClick={() => setCount((count) => count + 1)}>
|
||||
count is {count}
|
||||
</button>
|
||||
<p>
|
||||
Edit <code>src/App.tsx</code> and save to test HMR
|
||||
</p>
|
||||
</div>
|
||||
<p className="read-the-docs">
|
||||
Click on the Vite and React logos to learn more
|
||||
</p>
|
||||
<WebSocketViewer />
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export default App;
|
||||
41
frontend/src/WebSocketTest.tsx
Normal file
41
frontend/src/WebSocketTest.tsx
Normal file
@@ -0,0 +1,41 @@
|
||||
import { useEffect, useRef } from "react";
|
||||
|
||||
const WebSocketViewer = () => {
|
||||
const ws = useRef<any>(null);
|
||||
|
||||
useEffect(() => {
|
||||
// Connect to your Go backend WebSocket endpoint
|
||||
ws.current = new WebSocket(
|
||||
(window.location.protocol === "https:" ? "wss://" : "ws://") +
|
||||
window.location.host +
|
||||
"/lst/ws"
|
||||
);
|
||||
|
||||
ws.current.onopen = () => {
|
||||
console.log("[WebSocket] Connected");
|
||||
};
|
||||
|
||||
ws.current.onmessage = (event: any) => {
|
||||
console.log("[WebSocket] Message received:", event.data);
|
||||
};
|
||||
|
||||
ws.current.onerror = (error: any) => {
|
||||
console.error("[WebSocket] Error:", error);
|
||||
};
|
||||
|
||||
ws.current.onclose = () => {
|
||||
console.log("[WebSocket] Disconnected");
|
||||
};
|
||||
|
||||
// Cleanup on unmount
|
||||
return () => {
|
||||
if (ws.current) {
|
||||
ws.current.close();
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
return <div>Check the console for WebSocket messages</div>;
|
||||
};
|
||||
|
||||
export default WebSocketViewer;
|
||||
1
frontend/src/assets/react.svg
Normal file
1
frontend/src/assets/react.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 4.0 KiB |
68
frontend/src/index.css
Normal file
68
frontend/src/index.css
Normal file
@@ -0,0 +1,68 @@
|
||||
:root {
|
||||
font-family: system-ui, Avenir, Helvetica, Arial, sans-serif;
|
||||
line-height: 1.5;
|
||||
font-weight: 400;
|
||||
|
||||
color-scheme: light dark;
|
||||
color: rgba(255, 255, 255, 0.87);
|
||||
background-color: #242424;
|
||||
|
||||
font-synthesis: none;
|
||||
text-rendering: optimizeLegibility;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
}
|
||||
|
||||
a {
|
||||
font-weight: 500;
|
||||
color: #646cff;
|
||||
text-decoration: inherit;
|
||||
}
|
||||
a:hover {
|
||||
color: #535bf2;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
display: flex;
|
||||
place-items: center;
|
||||
min-width: 320px;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 3.2em;
|
||||
line-height: 1.1;
|
||||
}
|
||||
|
||||
button {
|
||||
border-radius: 8px;
|
||||
border: 1px solid transparent;
|
||||
padding: 0.6em 1.2em;
|
||||
font-size: 1em;
|
||||
font-weight: 500;
|
||||
font-family: inherit;
|
||||
background-color: #1a1a1a;
|
||||
cursor: pointer;
|
||||
transition: border-color 0.25s;
|
||||
}
|
||||
button:hover {
|
||||
border-color: #646cff;
|
||||
}
|
||||
button:focus,
|
||||
button:focus-visible {
|
||||
outline: 4px auto -webkit-focus-ring-color;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: light) {
|
||||
:root {
|
||||
color: #213547;
|
||||
background-color: #ffffff;
|
||||
}
|
||||
a:hover {
|
||||
color: #747bff;
|
||||
}
|
||||
button {
|
||||
background-color: #f9f9f9;
|
||||
}
|
||||
}
|
||||
10
frontend/src/main.tsx
Normal file
10
frontend/src/main.tsx
Normal file
@@ -0,0 +1,10 @@
|
||||
import { StrictMode } from 'react'
|
||||
import { createRoot } from 'react-dom/client'
|
||||
import './index.css'
|
||||
import App from './App.tsx'
|
||||
|
||||
createRoot(document.getElementById('root')!).render(
|
||||
<StrictMode>
|
||||
<App />
|
||||
</StrictMode>,
|
||||
)
|
||||
@@ -1,59 +0,0 @@
|
||||
/* eslint-disable */
|
||||
|
||||
// @ts-nocheck
|
||||
|
||||
// noinspection JSUnusedGlobalSymbols
|
||||
|
||||
// This file was automatically generated by TanStack Router.
|
||||
// You should NOT make any changes in this file as it will be overwritten.
|
||||
// Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified.
|
||||
|
||||
import { Route as rootRouteImport } from './routes/__root'
|
||||
import { Route as IndexRouteImport } from './routes/index'
|
||||
|
||||
const IndexRoute = IndexRouteImport.update({
|
||||
id: '/',
|
||||
path: '/',
|
||||
getParentRoute: () => rootRouteImport,
|
||||
} as any)
|
||||
|
||||
export interface FileRoutesByFullPath {
|
||||
'/': typeof IndexRoute
|
||||
}
|
||||
export interface FileRoutesByTo {
|
||||
'/': typeof IndexRoute
|
||||
}
|
||||
export interface FileRoutesById {
|
||||
__root__: typeof rootRouteImport
|
||||
'/': typeof IndexRoute
|
||||
}
|
||||
export interface FileRouteTypes {
|
||||
fileRoutesByFullPath: FileRoutesByFullPath
|
||||
fullPaths: '/'
|
||||
fileRoutesByTo: FileRoutesByTo
|
||||
to: '/'
|
||||
id: '__root__' | '/'
|
||||
fileRoutesById: FileRoutesById
|
||||
}
|
||||
export interface RootRouteChildren {
|
||||
IndexRoute: typeof IndexRoute
|
||||
}
|
||||
|
||||
declare module '@tanstack/react-router' {
|
||||
interface FileRoutesByPath {
|
||||
'/': {
|
||||
id: '/'
|
||||
path: '/'
|
||||
fullPath: '/'
|
||||
preLoaderRoute: typeof IndexRouteImport
|
||||
parentRoute: typeof rootRouteImport
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const rootRouteChildren: RootRouteChildren = {
|
||||
IndexRoute: IndexRoute,
|
||||
}
|
||||
export const routeTree = rootRouteImport
|
||||
._addFileChildren(rootRouteChildren)
|
||||
._addFileTypes<FileRouteTypes>()
|
||||
@@ -1,19 +0,0 @@
|
||||
// src/router.tsx
|
||||
import { createRouter as createTanStackRouter } from '@tanstack/react-router'
|
||||
import { routeTree } from './routeTree.gen'
|
||||
|
||||
export function createRouter() {
|
||||
const router = createTanStackRouter({
|
||||
basepath: '/lst',
|
||||
routeTree,
|
||||
scrollRestoration: true,
|
||||
})
|
||||
|
||||
return router
|
||||
}
|
||||
|
||||
declare module '@tanstack/react-router' {
|
||||
interface Register {
|
||||
router: ReturnType<typeof createRouter>
|
||||
}
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
/// <reference types="vite/client" />
|
||||
import type { ReactNode } from 'react'
|
||||
import {
|
||||
Outlet,
|
||||
createRootRoute,
|
||||
HeadContent,
|
||||
Scripts,
|
||||
} from '@tanstack/react-router'
|
||||
|
||||
export const Route = createRootRoute({
|
||||
head: () => ({
|
||||
meta: [
|
||||
{
|
||||
charSet: 'utf-8',
|
||||
},
|
||||
{
|
||||
name: 'viewport',
|
||||
content: 'width=device-width, initial-scale=1',
|
||||
},
|
||||
{
|
||||
title: 'TanStack Start Starter',
|
||||
},
|
||||
],
|
||||
}),
|
||||
component: RootComponent,
|
||||
})
|
||||
|
||||
function RootComponent() {
|
||||
return (
|
||||
<RootDocument>
|
||||
<Outlet />
|
||||
</RootDocument>
|
||||
)
|
||||
}
|
||||
|
||||
function RootDocument({ children }: Readonly<{ children: ReactNode }>) {
|
||||
return (
|
||||
<html>
|
||||
<head>
|
||||
<HeadContent />
|
||||
</head>
|
||||
<body>
|
||||
{children}
|
||||
<Scripts />
|
||||
</body>
|
||||
</html>
|
||||
)
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
// src/routes/index.tsx
|
||||
import * as fs from "node:fs";
|
||||
import { createFileRoute, useRouter } from "@tanstack/react-router";
|
||||
import { createServerFn } from "@tanstack/react-start";
|
||||
|
||||
const filePath = "count.txt";
|
||||
|
||||
async function readCount() {
|
||||
return parseInt(
|
||||
await fs.promises.readFile(filePath, "utf-8").catch(() => "0")
|
||||
);
|
||||
}
|
||||
|
||||
const getCount = createServerFn({
|
||||
method: "GET",
|
||||
}).handler(() => {
|
||||
return readCount();
|
||||
});
|
||||
|
||||
const updateCount = createServerFn({ method: "POST" })
|
||||
.validator((d: number) => d)
|
||||
.handler(async ({ data }) => {
|
||||
const count = await readCount();
|
||||
await fs.promises.writeFile(filePath, `${count + data}`);
|
||||
});
|
||||
|
||||
export const Route = createFileRoute("/")({
|
||||
component: Home,
|
||||
loader: async () => await getCount(),
|
||||
});
|
||||
|
||||
function Home() {
|
||||
const router = useRouter();
|
||||
const state = Route.useLoaderData();
|
||||
|
||||
return (
|
||||
<div>
|
||||
<p>This is just something to put in here</p>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => {
|
||||
updateCount({ data: 1 }).then(() => {
|
||||
router.invalidate();
|
||||
});
|
||||
}}
|
||||
>
|
||||
Add 1 to {state}?
|
||||
</button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
1
frontend/src/vite-env.d.ts
vendored
Normal file
1
frontend/src/vite-env.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/// <reference types="vite/client" />
|
||||
27
frontend/tsconfig.app.json
Normal file
27
frontend/tsconfig.app.json
Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
|
||||
"target": "ES2022",
|
||||
"useDefineForClassFields": true,
|
||||
"lib": ["ES2022", "DOM", "DOM.Iterable"],
|
||||
"module": "ESNext",
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Bundler mode */
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"moduleDetection": "force",
|
||||
"noEmit": true,
|
||||
"jsx": "react-jsx",
|
||||
|
||||
/* Linting */
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"erasableSyntaxOnly": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noUncheckedSideEffectImports": true
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
||||
@@ -1,10 +1,7 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"jsx": "react-jsx",
|
||||
"moduleResolution": "Bundler",
|
||||
"module": "ESNext",
|
||||
"target": "ES2022",
|
||||
"skipLibCheck": true,
|
||||
"strictNullChecks": true
|
||||
}
|
||||
}
|
||||
"files": [],
|
||||
"references": [
|
||||
{ "path": "./tsconfig.app.json" },
|
||||
{ "path": "./tsconfig.node.json" }
|
||||
]
|
||||
}
|
||||
|
||||
25
frontend/tsconfig.node.json
Normal file
25
frontend/tsconfig.node.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
|
||||
"target": "ES2023",
|
||||
"lib": ["ES2023"],
|
||||
"module": "ESNext",
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Bundler mode */
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"moduleDetection": "force",
|
||||
"noEmit": true,
|
||||
|
||||
/* Linting */
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"erasableSyntaxOnly": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noUncheckedSideEffectImports": true
|
||||
},
|
||||
"include": ["vite.config.ts"]
|
||||
}
|
||||
@@ -1,11 +1,40 @@
|
||||
import { defineConfig } from 'vite'
|
||||
import tsConfigPaths from 'vite-tsconfig-paths'
|
||||
import { tanstackStart } from '@tanstack/react-start/plugin/vite'
|
||||
import { defineConfig } from "vite";
|
||||
import react from "@vitejs/plugin-react-swc";
|
||||
|
||||
import path from "path";
|
||||
import dotenv from "dotenv";
|
||||
import { fileURLToPath } from "url";
|
||||
dotenv.config({
|
||||
path: path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../.env"),
|
||||
});
|
||||
|
||||
// https://vite.dev/config/
|
||||
export default defineConfig({
|
||||
base: '/lst/',
|
||||
server: {
|
||||
port: 3000,
|
||||
},
|
||||
plugins: [tsConfigPaths(), tanstackStart({ target: 'node-server' })],
|
||||
})
|
||||
plugins: [react()],
|
||||
base: "/lst/app/",
|
||||
build: {
|
||||
outDir: "../backend/frontend",
|
||||
assetsDir: "assets",
|
||||
emptyOutDir: true,
|
||||
},
|
||||
server: {
|
||||
proxy: {
|
||||
"/lst/api": {
|
||||
target: `http://localhost:${Number(
|
||||
process.env.VITE_SERVER_PORT || 8080
|
||||
)}`,
|
||||
changeOrigin: true,
|
||||
secure: false,
|
||||
},
|
||||
"/lst/ws": {
|
||||
target: `ws://localhost:${Number(
|
||||
process.env.VITE_SERVER_PORT || 8080
|
||||
)}`, // Your Go WebSocket endpoint
|
||||
ws: true,
|
||||
changeOrigin: true,
|
||||
secure: false,
|
||||
rewrite: (path) => path.replace(/^\/ws/, ""),
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
34940
lst-docs/package-lock.json
generated
34940
lst-docs/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -4,7 +4,7 @@
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"docusaurus": "docusaurus",
|
||||
"start": "docusaurus start",
|
||||
"start": "docusaurus start --no-open",
|
||||
"build": "rimraf build && docusaurus build && npm run build:copy",
|
||||
"build:copy": "powershell -File copy.ps1",
|
||||
"swizzle": "docusaurus swizzle",
|
||||
|
||||
190
package-lock.json
generated
190
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "logistics_support_tool",
|
||||
"version": "0.0.1-alpha.4",
|
||||
"version": "0.0.1-alpha.6",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "logistics_support_tool",
|
||||
"version": "0.0.1-alpha.4",
|
||||
"version": "0.0.1-alpha.6",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"dotenv": "^17.2.0",
|
||||
@@ -15,6 +15,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@release-it/conventional-changelog": "^10.0.1",
|
||||
"concurrently": "^9.2.0",
|
||||
"cz-conventional-changelog": "^3.3.0",
|
||||
"release-it": "^19.0.3",
|
||||
"standard-version": "^9.5.0"
|
||||
@@ -1810,6 +1811,168 @@
|
||||
"typedarray": "^0.0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/concurrently": {
|
||||
"version": "9.2.0",
|
||||
"resolved": "https://registry.npmjs.org/concurrently/-/concurrently-9.2.0.tgz",
|
||||
"integrity": "sha512-IsB/fiXTupmagMW4MNp2lx2cdSN2FfZq78vF90LBB+zZHArbIQZjQtzXCiXnvTxCZSvXanTqFLWBjw2UkLx1SQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"chalk": "^4.1.2",
|
||||
"lodash": "^4.17.21",
|
||||
"rxjs": "^7.8.1",
|
||||
"shell-quote": "^1.8.1",
|
||||
"supports-color": "^8.1.1",
|
||||
"tree-kill": "^1.2.2",
|
||||
"yargs": "^17.7.2"
|
||||
},
|
||||
"bin": {
|
||||
"conc": "dist/bin/concurrently.js",
|
||||
"concurrently": "dist/bin/concurrently.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/open-cli-tools/concurrently?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/concurrently/node_modules/ansi-styles": {
|
||||
"version": "4.3.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
||||
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"color-convert": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/concurrently/node_modules/chalk": {
|
||||
"version": "4.1.2",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
|
||||
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ansi-styles": "^4.1.0",
|
||||
"supports-color": "^7.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/chalk?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/concurrently/node_modules/chalk/node_modules/supports-color": {
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
|
||||
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"has-flag": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/concurrently/node_modules/cliui": {
|
||||
"version": "8.0.1",
|
||||
"resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
|
||||
"integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"string-width": "^4.2.0",
|
||||
"strip-ansi": "^6.0.1",
|
||||
"wrap-ansi": "^7.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/concurrently/node_modules/color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"color-name": "~1.1.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/concurrently/node_modules/color-name": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/concurrently/node_modules/has-flag": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
||||
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/concurrently/node_modules/supports-color": {
|
||||
"version": "8.1.1",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
|
||||
"integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"has-flag": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/supports-color?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/concurrently/node_modules/yargs": {
|
||||
"version": "17.7.2",
|
||||
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
|
||||
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"cliui": "^8.0.1",
|
||||
"escalade": "^3.1.1",
|
||||
"get-caller-file": "^2.0.5",
|
||||
"require-directory": "^2.1.1",
|
||||
"string-width": "^4.2.3",
|
||||
"y18n": "^5.0.5",
|
||||
"yargs-parser": "^21.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/concurrently/node_modules/yargs-parser": {
|
||||
"version": "21.1.1",
|
||||
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
|
||||
"integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/confbox": {
|
||||
"version": "0.2.2",
|
||||
"resolved": "https://registry.npmjs.org/confbox/-/confbox-0.2.2.tgz",
|
||||
@@ -6090,6 +6253,19 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/shell-quote": {
|
||||
"version": "1.8.3",
|
||||
"resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz",
|
||||
"integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/signal-exit": {
|
||||
"version": "3.0.7",
|
||||
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
|
||||
@@ -6520,6 +6696,16 @@
|
||||
"node": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/tree-kill": {
|
||||
"version": "1.2.2",
|
||||
"resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz",
|
||||
"integrity": "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"tree-kill": "cli.js"
|
||||
}
|
||||
},
|
||||
"node_modules/trim-newlines": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz",
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
{
|
||||
"name": "logistics_support_tool",
|
||||
"version": "0.0.1-alpha.4",
|
||||
"version": "0.0.1-alpha.6",
|
||||
"description": "This is the new logisitcs support tool",
|
||||
"private": true,
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"dev": "concurrently -n \"BACKEND,DOCS,FRONTEND\" -c \"bgBlue.bold,bgGreen.bold,bgMagenta.bold\" \"npm run backend\" \"npm run dev:docs\" \"npm run dev:front\"",
|
||||
"dev:front": "cd frontend && npm run dev",
|
||||
"dev:docs": "cd lst-docs && npm start -- --port 8081",
|
||||
"backend": "cd backend && go run .",
|
||||
"build": "powershell -File ./scripts/build.ps1",
|
||||
"docker:front": "docker build -t logistics_support_tool:frontend-latest -f frontend/Dockerfile ./frontend",
|
||||
@@ -27,7 +30,8 @@
|
||||
"@release-it/conventional-changelog": "^10.0.1",
|
||||
"cz-conventional-changelog": "^3.3.0",
|
||||
"release-it": "^19.0.3",
|
||||
"standard-version": "^9.5.0"
|
||||
"standard-version": "^9.5.0",
|
||||
"concurrently": "^9.2.0"
|
||||
},
|
||||
"config": {
|
||||
"commitizen": {
|
||||
|
||||
@@ -78,7 +78,7 @@ function Update-BuildNumber {
|
||||
$name = $matches[2]
|
||||
|
||||
$newNumber = $number + 1
|
||||
$newBuildNumber = "$newNumber-$name"
|
||||
$newBuildNumber = "$($newNumber)-$($name)"
|
||||
|
||||
Set-Content -Path $buildNumberFile -Value $newBuildNumber
|
||||
|
||||
@@ -87,26 +87,31 @@ function Update-BuildNumber {
|
||||
return $newBuildNumber
|
||||
} else {
|
||||
Write-Warning "BUILD_NUMBER file content '$current' is not in the expected 'number-name' format."
|
||||
Set-Content -Path $buildNumberFile -Value "1-"$($env:BUILD_NAME)
|
||||
return $null
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Push-Location $rootDir/backend
|
||||
|
||||
Write-Host "Building the app"
|
||||
go build -ldflags "-X main.version=$($version)-$($initialBuildValue)" -o lst_backend.exe ./main.go
|
||||
go get
|
||||
# swag init -o swagger -g main.go
|
||||
go build -ldflags "-X main.version=$($version)-$($initialBuildValue)" -o lst_app.exe ./main.go
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Warning "Backend build failed!"
|
||||
Write-Warning "app build failed!"
|
||||
Pop-Location
|
||||
break
|
||||
}
|
||||
|
||||
Write-Host "Backend build finished successfully."
|
||||
Write-Host "app build finished successfully."
|
||||
|
||||
Pop-Location
|
||||
|
||||
Push-Location $rootDir/frontend
|
||||
Write-Host "Building the frontend."
|
||||
npm i
|
||||
npm run build
|
||||
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
@@ -120,12 +125,45 @@ function Update-BuildNumber {
|
||||
|
||||
Write-Host "Building wrapper"
|
||||
Push-Location $rootDir/LstWrapper
|
||||
|
||||
Write-Host "Changing the port to match the server port in the env file"
|
||||
$port = $env:VITE_SERVER_PORT
|
||||
if (-not $port) {
|
||||
$port = "8080" # Default port if env var not set
|
||||
}
|
||||
|
||||
$webConfigPath = "web.config"
|
||||
$content = Get-Content -Path $webConfigPath -Raw
|
||||
|
||||
$newContent = $content -replace '(?<=Rewrite" url="http://localhost:)\d+(?=/\{R:1\}")', $port
|
||||
|
||||
$newContent | Set-Content -Path $webConfigPath -NoNewline
|
||||
|
||||
Write-Host "Updated web.config rewrite port to $port"
|
||||
|
||||
#remove the publish folder as we done need it
|
||||
if (-not (Test-Path "publish")) {
|
||||
Write-Host "The publish folder is already deleted nothing else to do"
|
||||
} else {
|
||||
Remove-Item -LiteralPath "publish" -Force -Recurse
|
||||
}
|
||||
|
||||
dotnet publish -c Release -o ./publish
|
||||
|
||||
$webConfigPath = "web.config"
|
||||
$content = Get-Content -Path $webConfigPath -Raw
|
||||
|
||||
$newContent = $content -replace '(?<=Rewrite" url="http://localhost:)\d+(?=/\{R:1\}")', "8080"
|
||||
|
||||
$newContent | Set-Content -Path $webConfigPath -NoNewline
|
||||
|
||||
Write-Host "Updated web.config rewrite port back to 8080"
|
||||
|
||||
Pop-Location
|
||||
|
||||
Write-Host "Building Docs"
|
||||
Push-Location $rootDir/lst-docs
|
||||
npm i
|
||||
npm run build
|
||||
|
||||
Pop-Location
|
||||
@@ -153,7 +191,7 @@ try {
|
||||
npm run release
|
||||
|
||||
# deleteing the temp folder so we always cleaned up
|
||||
Delete-Tmp-Folder
|
||||
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
throw "Failed to create Gitea release"
|
||||
}
|
||||
@@ -163,10 +201,10 @@ try {
|
||||
Write-Warning "Release process failed: $_"
|
||||
|
||||
# deleteing the temp folder so we always cleaned up
|
||||
Delete-Tmp-Folder
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
Delete-Tmp-Folder
|
||||
break
|
||||
|
||||
|
||||
|
||||
@@ -71,18 +71,22 @@ $tempStageDir = New-Item -ItemType Directory -Path (Join-Path $env:TEMP "lst_sta
|
||||
|
||||
# Copy files to organized structure
|
||||
$filesToCopy = @(
|
||||
@{ Source = "backend\lst_backend.exe"; Destination = "backend\lst_backend.exe" },
|
||||
@{ Source = "backend\lst_app.exe"; Destination = "app\lst_app.exe" },
|
||||
@{ Source = "backend\docs"; Destination = "app\docs\" },
|
||||
@{ Source = "backend\frontend"; Destination = "app\frontend\" },
|
||||
@{ Source = "LstWrapper\publish"; Destination = "lstwrapper\" },
|
||||
@{ Source = "frontend\.nitro"; Destination = "frontend\.nitro" },
|
||||
@{ Source = "frontend\.tanstack"; Destination = "frontend\.tanstack" },
|
||||
@{ Source = "frontend\.output"; Destination = "frontend\.output" },
|
||||
@{ Source = "frontend\public"; Destination = "frontend\public" },
|
||||
#@{ Source = "frontend\.nitro"; Destination = "frontend\.nitro" },
|
||||
#@{ Source = "frontend\.tanstack"; Destination = "frontend\.tanstack" },
|
||||
#@{ Source = "frontend\.output"; Destination = "frontend\.output" },
|
||||
#@{ Source = "frontend\public"; Destination = "frontend\public" },
|
||||
@{ Source = "package.json"; Destination = "package.json" },
|
||||
@{ Source = "CHANGELOG.md"; Destination = "CHANGELOG.md" },
|
||||
@{ Source = "README.md"; Destination = "README.md" },
|
||||
@{ Source = ".env-example"; Destination = ".env-example" },
|
||||
# scripts to be copied over
|
||||
@{ Source = "scripts\tmp"; Destination = "scripts\tmp" }
|
||||
@{ Source = "scripts\tmp"; Destination = "tmp" }
|
||||
@{ Source = "scripts\iisControls.ps1"; Destination = "scripts\iisControls.ps1" }
|
||||
@{ Source = "scripts\services.ps1"; Destination = "scripts\services.ps1" }
|
||||
# docs
|
||||
# @{ Source = "lst-docs\build"; Destination = "lst-docs\build" }
|
||||
)
|
||||
@@ -101,7 +105,7 @@ Remove-Item $tempStageDir -Recurse -Force
|
||||
|
||||
Write-Host "`nRelease package created at: $($zipPath)"
|
||||
Write-Host "Organized structure:"
|
||||
Write-Host "- backend/"
|
||||
Write-Host "- app/"
|
||||
Write-Host "- frontend/"
|
||||
Write-Host "- lstwrapper/"
|
||||
Write-Host "- scripts/"
|
||||
|
||||
@@ -1,15 +1,18 @@
|
||||
Write-Host "Building the docker images for front and backend"
|
||||
docker build -t logistics_support_tool:frontend-latest -f ./frontend/Dockerfile --no-cache ./frontend
|
||||
docker build -t logistics_support_tool:backend-latest -f ./backend/Dockerfile --no-cache ./backend
|
||||
#docker build -t logistics_support_tool:frontend-latest -f ./frontend/Dockerfile --no-cache ./frontend
|
||||
docker build -t logistics_support_tool:latest -f ./backend/Dockerfile --no-cache ./backend
|
||||
|
||||
Write-Host "Tagging the builds with latest this is for testing test basically."
|
||||
docker tag logistics_support_tool:frontend-latest git.tuffraid.net/cowch/logistics_support_tool:frontend-latest
|
||||
docker tag logistics_support_tool:backend-latest git.tuffraid.net/cowch/logistics_support_tool:backend-latest
|
||||
#docker tag logistics_support_tool:frontend-latest git.tuffraid.net/cowch/logistics_support_tool:frontend-latest
|
||||
docker tag logistics_support_tool:latest git.tuffraid.net/cowch/logistics_support_tool:latest
|
||||
|
||||
# docker build -t logistics_support_tool:frontend-latest --no-cache .
|
||||
Write-Host "Push both builds to our gitea server."
|
||||
docker push git.tuffraid.net/cowch/logistics_support_tool:frontend-latest
|
||||
docker push git.tuffraid.net/cowch/logistics_support_tool:backend-latest
|
||||
#docker push git.tuffraid.net/cowch/logistics_support_tool:frontend-latest
|
||||
docker push git.tuffraid.net/cowch/logistics_support_tool:latest
|
||||
|
||||
Write-Host "Pull the new images to our docker system"
|
||||
docker compose -f ./docker-compose.yml up -d --force-recreate
|
||||
docker compose -f ./docker-compose.yml up -d --force-recreate
|
||||
|
||||
# in case we get logged out docker login git.tuffraid.net
|
||||
# create a docker network so we have this for us docker network create -d bridge my-bridge-network
|
||||
@@ -4,6 +4,9 @@ param (
|
||||
[string]$StopOrStart
|
||||
)
|
||||
|
||||
# Example string to run with the parameters in it.
|
||||
# .\iisControls.ps1 -ServerName "usmcd1vms036" -AppPoolName "LogisticsSupportTool" -StopOrStart "stop"
|
||||
|
||||
write-host $StopOrStart
|
||||
if ($StopOrStart -eq "stop") {
|
||||
Invoke-Command -ComputerName $ServerName -Credential $cred -ScriptBlock {
|
||||
|
||||
@@ -36,11 +36,20 @@ function Build-LstV2-And-Copy {
|
||||
New-Item -Path $scriptDir -Name "tmp" -ItemType "Directory"
|
||||
}
|
||||
|
||||
Write-Host "Jumping into lstV2 to build it."
|
||||
$defaultChoice = "n"
|
||||
$input = Read-Host "Do we want to build lstV2? (Y/n) [$defaultChoice]"
|
||||
|
||||
$choice = if ([string]::IsNullOrWhiteSpace($input)) { $defaultChoice } else { $input.ToLower() }
|
||||
|
||||
if($choice -eq "n"){
|
||||
Write-Host "Just going to copy the latest build over as we dont want to build a new one."
|
||||
} else {
|
||||
Write-Host "Jumping into lstV2 to build it."
|
||||
Push-Location $lstv2Loc
|
||||
npm run build
|
||||
|
||||
Write-Host "LSTV2 Finished building."
|
||||
}
|
||||
|
||||
Write-Host "Copy the latest build to the tmpLoc"
|
||||
|
||||
|
||||
0
scripts/serviceController.ps1
Normal file
0
scripts/serviceController.ps1
Normal file
207
scripts/services.ps1
Normal file
207
scripts/services.ps1
Normal file
@@ -0,0 +1,207 @@
|
||||
param (
|
||||
[string]$serviceName,
|
||||
[string]$option,
|
||||
[string]$appPath,
|
||||
[string]$command, # just the command like run start or what ever you have in npm.
|
||||
[string]$description,
|
||||
[string]$remote,
|
||||
[string]$server,
|
||||
[string]$username,
|
||||
[string]$admpass
|
||||
)
|
||||
|
||||
# Example string to run with the parameters in it.
|
||||
# .\scripts\services.ps1 -serviceName "LST_app" -option "install" -appPath "E:\LST" -description "Logistics Support Tool in go" -command "E:\LST\app\lst_app.exe"
|
||||
|
||||
$nssmPath = $AppPath + "\nssm.exe"
|
||||
$npmPath = "C:\Program Files\nodejs\npm.cmd" # Path to npm.cmd
|
||||
|
||||
# Convert the plain-text password to a SecureString
|
||||
$securePass = ConvertTo-SecureString $admpass -AsPlainText -Force
|
||||
$credentials = New-Object System.Management.Automation.PSCredential($username, $securePass)
|
||||
|
||||
if($remote -eq "true"){
|
||||
|
||||
# if(-not $username -or -not $admpass){
|
||||
# Write-host "Missing adm account info please try again."
|
||||
# exit 1
|
||||
# }
|
||||
|
||||
$plantFunness = {
|
||||
param ($service, $processType, $location)
|
||||
# Call your PowerShell script inside plantFunness
|
||||
# & "$($location)\dist\server\scripts\services.ps1" -serviceName $service -option $processType -appPath $location
|
||||
|
||||
if (-not ([Security.Principal.WindowsPrincipal] [Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")) {
|
||||
Write-Host "Error: This script must be run as Administrator."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if(-not $service -or -not $processType){
|
||||
Write-host "The service name or option is missing please enter one of them and try again."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if ($processType -eq "start"){
|
||||
write-host "Starting $($service)."
|
||||
Start-Service $service
|
||||
}
|
||||
|
||||
if ($processType -eq "stop"){
|
||||
write-host "Stoping $($service)."
|
||||
Stop-Service $service
|
||||
}
|
||||
|
||||
if ($processType -eq "restart"){
|
||||
write-host "Stoping $($service) to be restarted"
|
||||
Stop-Service $service
|
||||
Start-Sleep 3 # so we give it enough time to fully stop
|
||||
write-host "Starting $($service)"
|
||||
Start-Service $service
|
||||
}
|
||||
|
||||
if ($processType -eq "prodStop"){
|
||||
if(-not $location){
|
||||
Write-host "The path to the app is missing please add it in and try again."
|
||||
exit 1
|
||||
}
|
||||
& $nssmPath stop $service
|
||||
write-host "Removing $($service)"
|
||||
#& $nssmPath remove $serviceName confirm
|
||||
sc.exe config $service start= disabled
|
||||
|
||||
}
|
||||
|
||||
if ($processType -eq "prodStart"){
|
||||
if(-not $location){
|
||||
Write-host "The path to the app is missing please add it in and try again."
|
||||
exit 1
|
||||
}
|
||||
& $nssmPath start $service
|
||||
write-host "Removing $($service)"
|
||||
#& $nssmPath remove $serviceName confirm
|
||||
sc.exe config $service start= auto
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
Invoke-Command -ComputerName $server -ScriptBlock $plantFunness -ArgumentList $serviceName, $option, $appPath -Credential $credentials
|
||||
} else {
|
||||
|
||||
if (-not ([Security.Principal.WindowsPrincipal] [Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")) {
|
||||
Write-Host "Error: This script must be run as Administrator."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if(-not $serviceName -or -not $option){
|
||||
Write-host "The service name or option is missing please enter one of them and try again."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if ($option -eq "start"){
|
||||
write-host "Starting $($serviceName)."
|
||||
Start-Service $serviceName
|
||||
}
|
||||
|
||||
if ($option -eq "stop"){
|
||||
write-host "Stoping $($serviceName)."
|
||||
Stop-Service $serviceName
|
||||
}
|
||||
|
||||
if ($option -eq "restart"){
|
||||
write-host "Stoping $($serviceName) to be restarted"
|
||||
Stop-Service $serviceName
|
||||
Start-Sleep 3 # so we give it enough time to fully stop
|
||||
write-host "Starting $($serviceName)"
|
||||
Start-Service $serviceName
|
||||
}
|
||||
|
||||
if ($option -eq "delete"){
|
||||
if(-not $appPath){
|
||||
Write-host "The path to the app is missing please add it in and try again."
|
||||
exit 1
|
||||
}
|
||||
& $nssmPath stop $serviceName
|
||||
write-host "Removing $($serviceName)"
|
||||
& $nssmPath remove $serviceName confirm
|
||||
|
||||
}
|
||||
|
||||
if ($option -eq "prodStop"){
|
||||
if(-not $appPath){
|
||||
Write-host "The path to the app is missing please add it in and try again."
|
||||
exit 1
|
||||
}
|
||||
& $nssmPath stop $serviceName
|
||||
write-host "Removing $($serviceName)"
|
||||
#& $nssmPath remove $serviceName confirm
|
||||
sc.exe config $serviceName start= disabled
|
||||
|
||||
}
|
||||
|
||||
if ($option -eq "prodStart"){
|
||||
if(-not $appPath){
|
||||
Write-host "The path to the app is missing please add it in and try again."
|
||||
exit 1
|
||||
}
|
||||
& $nssmPath start $serviceName
|
||||
write-host "Removing $($serviceName)"
|
||||
#& $nssmPath remove $serviceName confirm
|
||||
sc.exe config $serviceName start= auto
|
||||
|
||||
}
|
||||
|
||||
if($option -eq "install"){
|
||||
if(-not $appPath -or -not $description -or -not $command){
|
||||
Write-host "Please check all parameters are passed to install the app.."
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
$service = Get-Service -Name $serviceName -ErrorAction SilentlyContinue
|
||||
|
||||
if(-not $service){
|
||||
write-host $serviceName "is not installed we will install it now"
|
||||
|
||||
Write-Host "Installing $serviceName..."
|
||||
if($command.Contains(".exe")){
|
||||
|
||||
& $nssmPath install $serviceName $command
|
||||
|
||||
$fullAppPath = "$appPath\app"
|
||||
& $nssmPath set $serviceName AppDirectory $fullAppPath
|
||||
}else {
|
||||
& $nssmPath install $serviceName $npmPath $command
|
||||
& $nssmPath set $serviceName AppDirectory $appPath
|
||||
}
|
||||
|
||||
|
||||
& $nssmPath set $serviceName Description $description
|
||||
& $nssmPath set $serviceName AppStdout "E:\LST\logs\service.log"
|
||||
& $nssmPath set $serviceName AppStderr "E:\LST\logs\service-error.log"
|
||||
& $nssmPath set $serviceName DependOnService "MSSQLSERVER"
|
||||
# Set recovery options
|
||||
sc.exe failure $serviceName reset= 0 actions= restart/5000/restart/5000/restart/5000
|
||||
& $nssmPath start $serviceName
|
||||
}else{
|
||||
write-host $serviceName "is already installed will push the updated info"
|
||||
Write-Host "Updating $serviceName..."
|
||||
& $nssmPath stop $serviceName
|
||||
|
||||
if($command.Contains(".exe")){
|
||||
$fullAppPath = "$appPath\app"
|
||||
& $nssmPath set $serviceName AppDirectory $fullAppPath
|
||||
}else {
|
||||
& $nssmPath set $serviceName AppDirectory $appPath
|
||||
}
|
||||
|
||||
& $nssmPath set $serviceName Description $description
|
||||
# & $nssmPath set $serviceName DependOnService "IISADMIN MSSQLSERVER"
|
||||
# Set recovery options
|
||||
sc.exe failure $serviceName reset= 0 actions= restart/5000/restart/5000/restart/5000
|
||||
Start-Sleep 4
|
||||
& $nssmPath start $serviceName
|
||||
}
|
||||
}
|
||||
}
|
||||
246
scripts/updateServer.ps1
Normal file
246
scripts/updateServer.ps1
Normal file
@@ -0,0 +1,246 @@
|
||||
param (
|
||||
[string]$server,
|
||||
[string]$token,
|
||||
[string]$location,
|
||||
[string]$devFolder,
|
||||
[string]$serverIP,
|
||||
[string]$build,
|
||||
[string]$type,
|
||||
[string]$username,
|
||||
[string]$admpass,
|
||||
[string]$obslst,
|
||||
[string]$obsBuild
|
||||
)
|
||||
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$rootDir = Join-Path $scriptDir ".."
|
||||
|
||||
# example run script is
|
||||
# .\updateServer.ps1 -username "username" -admpass "password" -token "test3" -server "usmcd1vms036" -serverIP "10.193.0.56" -build "C:\Users\matthes01\Documents\logistics_support_tool\releases" -location "E:\LST"
|
||||
|
||||
# Convert the plain-text password to a SecureString
|
||||
$securePass = ConvertTo-SecureString $admpass -AsPlainText -Force
|
||||
$credentials = New-Object System.Management.Automation.PSCredential($username, $securePass)
|
||||
|
||||
# lets get the version of the app we are updating
|
||||
$pkgFile = "$rootDir\package.json"
|
||||
$package = Get-Content -Path $pkgFile -Raw | ConvertFrom-Json
|
||||
$buildNumberFile = Join-Path $rootDir "BUILD_NUMBER"
|
||||
|
||||
if (Test-Path $buildNumberFile) {
|
||||
$buildNumber = Get-Content $buildNumberFile -Raw
|
||||
$buildNumber = $buildNumber.Trim()
|
||||
Write-Host "Current build number: $buildNumber"
|
||||
} else {
|
||||
Write-Warning "BUILD_NUMBER file not found at $buildNumberFile"
|
||||
}
|
||||
|
||||
$version = "$($package.version)-$($buildNumber)"
|
||||
|
||||
# Checking to make sure the server is up and online
|
||||
Write-Output "Checking if $($token) is online to update."
|
||||
$pingResult = Test-Connection -ComputerName $serverIP -Count 2 -Quiet
|
||||
|
||||
if (-not $pingResult) {
|
||||
Write-Output "Server $($server) $($serverIP) is NOT reachable. Exiting script."
|
||||
exit 1 # Terminate the script with a non-zero exit code
|
||||
}
|
||||
|
||||
Write-Output "Server $($server) ($serverIP) is online."
|
||||
|
||||
|
||||
$buildZip = "release-v$version.zip"
|
||||
if (-Not (Test-Path -Path "$($build)\$($buildZip)")) {
|
||||
Write-Host "Build is missing from the build folder."
|
||||
Write-host $buildZip
|
||||
exit
|
||||
} else {
|
||||
Write-Host "We have the build lets move to next step"
|
||||
}
|
||||
|
||||
|
||||
Write-Host "Starting the update Process"
|
||||
Write-Host "File to be copied over is $buildZip"
|
||||
Write-Host "Coping files to $($server)"
|
||||
$destination = "\\$($server)\$($location)" -replace ":", "$"
|
||||
Write-Host $destination
|
||||
Write-Host "Forcing the removal of the mapped drive, just incase it was lingering"
|
||||
|
||||
Get-PSDrive -Name "z" -ErrorAction SilentlyContinue | Remove-PSDrive -Force
|
||||
|
||||
|
||||
# Create a mapped drive with credentials using New-PSDrive for the current session
|
||||
|
||||
try {
|
||||
net use $server /delete /y 2>$null
|
||||
#New-PSDrive -Name "z" -PSProvider FileSystem -Root $destination -Credential $credentials
|
||||
New-PSDrive -Name "Z" -PSProvider FileSystem -Root $destination -Persist -Credential $credentials
|
||||
|
||||
# Create the update folder if it doesn't exist
|
||||
if (-not (Test-Path -Path $destination)) {
|
||||
New-Item -ItemType Directory -Path $destination -Force
|
||||
}
|
||||
|
||||
# Copying files to the server
|
||||
Write-Host "Copying files to $($server)"
|
||||
Copy-Item -Path "$($build)\$($buildZip)" -Destination "Z:\" -Recurse -Force
|
||||
Write-Host "Files copied to $($server)"
|
||||
} catch {
|
||||
Write-Host "Error: $_"
|
||||
} finally {
|
||||
# Remove the mapped drive after copying
|
||||
if (Get-PSDrive -Name "Z" -ErrorAction SilentlyContinue) {
|
||||
Write-Host "Removing mapped drive..."
|
||||
Remove-PSDrive -Name "Z"
|
||||
}
|
||||
}
|
||||
|
||||
# create the extracted path
|
||||
$extractedFolderPath = "$server\$location$(if ($token -eq "usiow2") { "_2" })"
|
||||
|
||||
$plantFunness = {
|
||||
param ($server, $token, $location, $buildFile, $buildLoc)
|
||||
|
||||
$localPath = $location -replace '\$', ':'
|
||||
$serverFile = "$($localPath)\$buildFile"
|
||||
$serverPath = "$($localPath)"
|
||||
|
||||
|
||||
# $appPath = $extractedFolderPath
|
||||
$nssmPath = $serverPath + "\nssm.exe"
|
||||
$npmPath = "C:\Program Files\nodejs\npm.cmd"
|
||||
|
||||
$serviceApp = "LST_app$(if ($token -eq "usiow2") { "_2" })"
|
||||
#$serviceFrontEnd = "LST-frontend$(if ($token -eq "usiow2") { "_2" })"
|
||||
$serviceOcme = "LST-Ocme$(if ($token -eq "usiow2") { "_2" })"
|
||||
$serviceLstV2 = "LSTV2$(if ($token -eq "usiow2") { "_2" })"
|
||||
|
||||
Write-Host "In the plant we go!!!!!"
|
||||
# $servicesScript = Join-Path $PSScriptRoot "scripts\services.ps1"
|
||||
# $iisContorl = Join-Path $PSScriptRoot "scripts\iisControls.ps1"
|
||||
######################################################################################
|
||||
# Stop the services and iis to do the updates.
|
||||
######################################################################################
|
||||
Write-Host "Stopping the services to do the updates, pkgs and db changes."
|
||||
|
||||
Write-Host "Stopping $serviceApp"
|
||||
Stop-Service -DisplayName $serviceApp -Force
|
||||
Start-Sleep -Seconds 1
|
||||
|
||||
Write-Host "Stopping $serviceLstV2"
|
||||
Stop-Service -DisplayName $serviceLstV2 -Force
|
||||
Start-Sleep -Seconds 1
|
||||
|
||||
if($token -eq "usday1"){
|
||||
Write-Host "Stopping $($serviceOcme)"
|
||||
Stop-Service -DisplayName $serviceOcme -Force
|
||||
}
|
||||
|
||||
Write-Host "Stopping iis application"
|
||||
Stop-WebAppPool -Name LogisticsSupportTool #-ErrorAction Stop
|
||||
Start-Sleep -Seconds 3
|
||||
|
||||
|
||||
######################################################################################
|
||||
# Removing the cashed stuff to keep the folder clean.
|
||||
######################################################################################
|
||||
|
||||
# Delete the directories after extraction
|
||||
Write-Host "Deleting app and other cache stuff"
|
||||
|
||||
Set-Location $serverPath
|
||||
if (-not (Test-Path "app")) {
|
||||
Write-Host "The app folder is already deleted nothing else to do"
|
||||
} else {
|
||||
Remove-Item -LiteralPath "app" -Force -Recurse
|
||||
}
|
||||
|
||||
if (-not (Test-Path "lstWrapper")) {
|
||||
Write-Host "The lstWrapper folder is already deleted nothing else to do"
|
||||
} else {
|
||||
Remove-Item -LiteralPath "lstWrapper" -Force -Recurse
|
||||
}
|
||||
|
||||
if (-not (Test-Path "scripts")) {
|
||||
Write-Host "The scripts folder is already deleted nothing else to do"
|
||||
} else {
|
||||
Remove-Item -LiteralPath "scripts" -Force -Recurse
|
||||
}
|
||||
|
||||
if (-not (Test-Path "tmp")) {
|
||||
Write-Host "The scripts folder is already deleted nothing else to do"
|
||||
} else {
|
||||
Remove-Item -LiteralPath "tmp" -Force -Recurse
|
||||
}
|
||||
|
||||
|
||||
######################################################################################
|
||||
# Extract everything the cashed stuff to keep the folder clean.
|
||||
######################################################################################
|
||||
|
||||
Write-Host "Unzipping the folder..."
|
||||
try {
|
||||
# Expand the archive
|
||||
Expand-Archive -Path $serverFile -DestinationPath $serverPath -Force
|
||||
|
||||
# Delete the zip file after extraction
|
||||
Write-Host "Deleting the zip file..."
|
||||
Remove-Item -Path $serverFile -Force
|
||||
} catch {
|
||||
Write-Host "Error: $_"
|
||||
exit 1 # Exit with a non-zero code if there's an error
|
||||
}
|
||||
|
||||
Write-Host "Unzip the LSTV2 app"
|
||||
|
||||
#lstv2 loc
|
||||
$lstv2 = Get-ChildItem -Path "$($localPath)\tmp" -Filter *.zip
|
||||
|
||||
write-host $lstv2[0].FullName
|
||||
|
||||
if ($lstv2.Count -eq 0) {
|
||||
Write-Host "No zip files found in $localPath\tmp"
|
||||
|
||||
} else {
|
||||
$zipToExtract = $lstv2[0].FullName
|
||||
$destination = "$localPath\lstv2"
|
||||
|
||||
Expand-Archive -Path $zipToExtract -DestinationPath $destination -Force
|
||||
Write-Host "Extracted $zipToExtract to $destination"
|
||||
|
||||
$removePath = "$($localPath)\tmp"
|
||||
Remove-Item -Path $removePath -Force /Y
|
||||
}
|
||||
|
||||
######################################################################################
|
||||
# Doing the update stuff.
|
||||
# for now just on the old version.
|
||||
######################################################################################
|
||||
Push-Location $serverPath/lstv2
|
||||
npm i
|
||||
npm run db:migrate
|
||||
|
||||
######################################################################################
|
||||
# Start the services and iis backup.
|
||||
######################################################################################
|
||||
Write-Host "Starting the services up."
|
||||
|
||||
Write-Host "Starting $serviceApp"
|
||||
Start-Service -DisplayName $serviceApp
|
||||
Start-Sleep -Seconds 1
|
||||
|
||||
Write-Host "Starting $serviceLstV2"
|
||||
Start-Service -DisplayName $serviceLstV2
|
||||
Start-Sleep -Seconds 1
|
||||
|
||||
if($token -eq "usday1"){
|
||||
Write-Host "Starting $($serviceOcme)"
|
||||
Start-Service -DisplayName $serviceOcme
|
||||
}
|
||||
|
||||
Write-Host "Starting iis application"
|
||||
Start-WebAppPool -Name LogisticsSupportTool
|
||||
|
||||
}
|
||||
|
||||
Invoke-Command -ComputerName $server -ScriptBlock $plantFunness -ArgumentList $server, $token, $location, $buildZip, $buildLoc -Credential $credentials
|
||||
Reference in New Issue
Block a user