Compare commits

...

38 Commits

Author SHA1 Message Date
d8455ccaa3 Merge branch 'main' into devcontainers 2026-02-03 21:42:12 -06:00
7ca617a51c Fixed annoying bullshit 2026-02-03 22:41:37 -05:00
0e2c5f8318 Merge pull request 'devcontainers' (#183) from devcontainers into main
Reviewed-on: #183
2026-02-03 21:27:50 -06:00
6811dc461c Merge branch 'main' into devcontainers 2026-02-03 21:27:30 -06:00
6f11bdb01d Merge branch 'devcontainers' of https://gitea.iceberg-gaming.com/17th-Ranger-Battalion-ORG/milsim-site-v4 into devcontainers 2026-02-03 22:27:03 -05:00
dd440a4e75 Cleaned up unused tables 2026-02-03 22:27:02 -05:00
2f7276a6c6 Merge pull request 'devcontainers' (#182) from devcontainers into main
Reviewed-on: #182
2026-02-03 21:23:10 -06:00
c18ef9aa8d Merge branch 'main' into devcontainers 2026-02-03 21:22:57 -06:00
3a5f9eb6f0 Merge branch 'devcontainers' of https://gitea.iceberg-gaming.com/17th-Ranger-Battalion-ORG/milsim-site-v4 into devcontainers 2026-02-03 22:22:52 -05:00
ab31b6e9f2 Corrected SP update handling 2026-02-03 22:22:50 -05:00
9ec30be6fb Merge pull request 'devcontainers' (#181) from devcontainers into main
Reviewed-on: #181
2026-02-03 20:40:22 -06:00
0c58e4045f Merge branch 'main' into devcontainers 2026-02-03 20:40:11 -06:00
ca23675dd1 Merge branch 'devcontainers' of https://gitea.iceberg-gaming.com/17th-Ranger-Battalion-ORG/milsim-site-v4 into devcontainers 2026-02-03 21:39:48 -05:00
e8805616c7 Fixed view creation breaking 2026-02-03 21:39:47 -05:00
1f9511139f Merge pull request 'Fixed stored procs trying to overwrite themselves' (#180) from devcontainers into main
Reviewed-on: #180
2026-02-03 20:31:58 -06:00
d8fbaed538 Merge branch 'main' into devcontainers 2026-02-03 20:31:46 -06:00
edbd18744d Fixed stored procs trying to overwrite themselves 2026-02-03 21:28:27 -05:00
76ca516bf6 Merge pull request 'devcontainers' (#176) from devcontainers into main
Reviewed-on: #176
2026-02-03 20:16:12 -06:00
c4f46eeffd Merge branch 'main' into devcontainers 2026-02-03 19:09:30 -06:00
d0839ed51d Removed logging 2026-02-01 11:44:02 -05:00
ec4a35729f Fixed member role parsing 2026-02-01 11:42:01 -05:00
686838e9bf Merge remote-tracking branch 'Origin/main' into devcontainers 2026-02-01 11:35:47 -05:00
7445dbf9f8 Integrated db-migrate package 2026-02-01 11:35:24 -05:00
8415e27ff3 added readme 2026-01-26 20:41:25 -05:00
083ddc345b overhauled mock auth solution 2026-01-26 01:14:19 -05:00
b4fcb1a366 finalized migration scripts 2026-01-25 20:14:24 -05:00
7017c2427c Updated db scripts 2026-01-25 10:49:29 -05:00
f9f1593b46 Merge branch 'main' into devcontainers 2026-01-21 12:35:14 -05:00
f26b285a88 Fixed launch script 2026-01-19 19:27:11 -05:00
d9732830bb added auth mode changes 2026-01-19 19:22:15 -05:00
2c2936b01f Merge remote-tracking branch 'Origin/main' into devcontainers 2026-01-19 19:07:13 -05:00
e177723767 Update data migrations and removed redundant env 2025-12-30 20:41:14 -05:00
dae6d142f2 Merge branch 'main' into devcontainers 2025-12-30 20:00:35 -05:00
67ce112934 added database seed function 2025-10-29 00:34:34 -04:00
33eca18e82 added database migration system, reference package.json for commands 2025-10-28 21:31:14 -04:00
6b29501d59 created .env manager system and prod/dev run commands 2025-10-28 17:48:55 -04:00
8670b50b56 ignored dev database files 2025-10-28 17:47:56 -04:00
4445f5dd92 created docker compose dev 2025-10-28 16:24:22 -04:00
18 changed files with 114795 additions and 112 deletions

2
.gitignore vendored
View File

@@ -32,3 +32,5 @@ coverage
 *.sql
 .env
 *.db
+db_data

2
api/.gitignore vendored
View File

@@ -1 +1,3 @@
 built
+!migrations/*/*.sql

20
api/database.json Normal file
View File

@@ -0,0 +1,20 @@
{
  "dev": {
    "driver": "mysql",
    "user": "root",
    "password": "root",
    "host": "localhost",
    "database": "ranger_unit_tracker",
    "port": "3306",
    "multipleStatements": true
  },
  "prod": {
    "driver": "mysql",
    "user": { "ENV": "DB_USERNAME" },
    "password": { "ENV": "DB_PASSWORD" },
    "host": { "ENV": "DB_HOST" },
    "database": { "ENV": "DB_DATABASE" },
    "port": { "ENV": "DB_PORT" },
    "multipleStatements": true
  }
}
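This is the standard db-migrate configuration file: the `dev` block targets the local MariaDB container from docker-compose.dev.yml, while `prod` resolves each value from the environment via the `{"ENV": ...}` form. As a rough illustration only (not code from this repo), db-migrate's documented module API can apply pending migrations against the default `dev` environment roughly like this:

```
// Illustrative sketch only, not part of this changeset.
// Uses db-migrate's documented module API; with no environment specified
// it falls back to the "dev" block of database.json.
const DBMigrate = require('db-migrate');

// true = run as a module, so db-migrate does not exit the process itself
const dbmigrate = DBMigrate.getInstance(true);

dbmigrate.up()
  .then(() => console.log('migrations applied'))
  .catch((err) => console.error('migration failed', err));
```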

53
api/migrations/20260201154439-initial.js Normal file
View File

@@ -0,0 +1,53 @@
'use strict';

var dbm;
var type;
var seed;
var fs = require('fs');
var path = require('path');
var Promise;

/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function(options, seedLink) {
  dbm = options.dbmigrate;
  type = dbm.dataType;
  seed = seedLink;
  Promise = options.Promise;
};

exports.up = function(db) {
  var filePath = path.join(__dirname, 'sqls', '20260201154439-initial-up.sql');
  return new Promise( function( resolve, reject ) {
    fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
      if (err) return reject(err);
      console.log('received data: ' + data);
      resolve(data);
    });
  })
  .then(function(data) {
    return db.runSql(data);
  });
};

exports.down = function(db) {
  var filePath = path.join(__dirname, 'sqls', '20260201154439-initial-down.sql');
  return new Promise( function( resolve, reject ) {
    fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
      if (err) return reject(err);
      console.log('received data: ' + data);
      resolve(data);
    });
  })
  .then(function(data) {
    return db.runSql(data);
  });
};

exports._meta = {
  "version": 1
};

53
api/migrations/20260204025935-remove-unused-tables.js Normal file
View File

@@ -0,0 +1,53 @@
'use strict';

var dbm;
var type;
var seed;
var fs = require('fs');
var path = require('path');
var Promise;

/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function(options, seedLink) {
  dbm = options.dbmigrate;
  type = dbm.dataType;
  seed = seedLink;
  Promise = options.Promise;
};

exports.up = function(db) {
  var filePath = path.join(__dirname, 'sqls', '20260204025935-remove-unused-tables-up.sql');
  return new Promise( function( resolve, reject ) {
    fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
      if (err) return reject(err);
      console.log('received data: ' + data);
      resolve(data);
    });
  })
  .then(function(data) {
    return db.runSql(data);
  });
};

exports.down = function(db) {
  var filePath = path.join(__dirname, 'sqls', '20260204025935-remove-unused-tables-down.sql');
  return new Promise( function( resolve, reject ) {
    fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
      if (err) return reject(err);
      console.log('received data: ' + data);
      resolve(data);
    });
  })
  .then(function(data) {
    return db.runSql(data);
  });
};

exports._meta = {
  "version": 1
};

112185
api/migrations/seed.sql Normal file

File diff suppressed because it is too large

1
api/migrations/sqls/20260201154439-initial-down.sql Normal file
View File

@@ -0,0 +1 @@
/* Replace with your SQL commands */

api/migrations/sqls/20260201154439-initial-up.sql Normal file

File diff suppressed because it is too large

1
api/migrations/sqls/20260204025935-remove-unused-tables-down.sql Normal file
View File

@@ -0,0 +1 @@
/* Replace with your SQL commands */

5
api/migrations/sqls/20260204025935-remove-unused-tables-up.sql Normal file
View File

@@ -0,0 +1,5 @@
/* Replace with your SQL commands */
DROP PROCEDURE `sp_update_member_rank_Backup_1-27-2026`;
DROP PROCEDURE `sp_update_member_status_Backup_1-27-2026`;
DROP PROCEDURE `sp_update_member_unit_Backup_1-27-2026`;

880
api/package-lock.json generated

File diff suppressed because it is too large

api/package.json
View File

@@ -9,26 +9,33 @@
   "scripts": {
     "test": "echo \"Error: no test specified\" && exit 1",
     "dev": "tsc && tsc-alias && node ./built/api/src/index.js",
-    "build": "tsc && tsc-alias"
+    "prod": "tsc && tsc-alias && node ./built/api/src/index.js",
+    "build": "tsc && tsc-alias",
+    "seed": "node ./scripts/seed.js"
   },
   "dependencies": {
+    "@rsol/hashmig": "^1.0.7",
     "@sentry/node": "^10.27.0",
     "@types/express-session": "^1.18.2",
     "connect-sqlite3": "^0.9.16",
     "cors": "^2.8.5",
-    "dotenv": "^17.2.1",
+    "db-migrate": "^0.11.14",
+    "db-migrate-mysql": "^3.0.0",
+    "dotenv": "16.6.1",
     "express": "^5.1.0",
     "express-session": "^1.18.2",
     "mariadb": "^3.4.5",
     "morgan": "^1.10.1",
     "mysql2": "^3.14.3",
     "passport": "^0.7.0",
+    "passport-custom": "^1.1.1",
     "passport-openidconnect": "^0.1.2"
   },
   "devDependencies": {
     "@types/express": "^5.0.3",
     "@types/morgan": "^1.9.10",
     "@types/node": "^24.8.1",
+    "cross-env": "^10.1.0",
     "tsc-alias": "^1.8.16",
     "typescript": "^5.9.3"
   }

29
api/scripts/migrate.js Normal file
View File

@@ -0,0 +1,29 @@
const dotenv = require('dotenv');
const path = require('path');
const { execSync } = require('child_process');

// Load DB connection settings from the API workspace's .env file.
dotenv.config({ path: path.resolve(process.cwd(), `.env`) });

const db = {
  user: process.env.DB_USERNAME,
  pass: process.env.DB_PASSWORD,
  host: process.env.DB_MIGRATION_HOST,
  port: process.env.DB_PORT,
  name: process.env.DB_DATABASE,
};

// golang-migrate connection string: mysql://user:pass@tcp(host:port)/database
const dbUrl = `mysql://${db.user}:${db.pass}@tcp(${db.host}:${db.port})/${db.name}`;

// Any CLI arguments (e.g. "up", "down 1") are forwarded to the migrate CLI.
const args = process.argv.slice(2).join(" ");
const migrations = path.join(process.cwd(), "migrations");

// Run the migrate/migrate image with the local migrations directory mounted in.
const cmd = [
  "docker run --rm",
  `-v "${migrations}:/migrations"`,
  "migrate/migrate",
  "-path=/migrations",
  `-database "${dbUrl}"`, // Double quotes keep the URL intact in the shell
  args,
].join(" ");

console.log(cmd);
execSync(cmd, { stdio: "inherit" });
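For illustration only: assuming placeholder values of `DB_USERNAME=dev`, `DB_PASSWORD=dev`, `DB_MIGRATION_HOST=db`, `DB_PORT=3306`, and `DB_DATABASE=ranger_unit_tracker` (the hostname `db` and the path below are assumptions, not values defined in this diff), `node scripts/migrate.js up` would print and execute a command along these lines:

```
docker run --rm -v "/path/to/api/migrations:/migrations" migrate/migrate -path=/migrations -database "mysql://dev:dev@tcp(db:3306)/ranger_unit_tracker" up
```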

33
api/scripts/seed.js Normal file
View File

@@ -0,0 +1,33 @@
const dotenv = require("dotenv");
const path = require("path");
const mariadb = require("mariadb");
const fs = require("fs");

// Load DB connection settings from the API workspace's .env file.
dotenv.config({ path: path.resolve(process.cwd(), `.env`) });

const { DB_HOST, DB_PORT, DB_USERNAME, DB_PASSWORD, DB_DATABASE, APPLICATION_ENVIRONMENT } = process.env;

// Do not accidentally seed prod, please.
if (APPLICATION_ENVIRONMENT !== "dev") {
  console.log("PLEASE DO NOT SEED PROD!!!!");
  process.exit(0);
}

(async () => {
  const conn = await mariadb.createConnection({
    host: DB_HOST,
    port: DB_PORT,
    user: DB_USERNAME,
    password: DB_PASSWORD,
    database: DB_DATABASE,
    multipleStatements: true,
  });

  // Replay the full seed dump into the freshly migrated database.
  const seedFile = path.join(process.cwd(), "migrations", "seed.sql");
  const sql = fs.readFileSync(seedFile, "utf8");
  await conn.query(sql);
  await conn.end();
  console.log("Seeded");
})();
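Both scripts read their settings from a `.env` file in the `api` workspace, which is not included in this diff. A minimal sketch of the variables they reference, with placeholder values that line up with the `dev` block of database.json and docker-compose.dev.yml:

```
# Placeholder values for local development only; the real .env is not part of this changeset
APPLICATION_ENVIRONMENT=dev
DB_HOST=localhost
DB_MIGRATION_HOST=localhost
DB_PORT=3306
DB_USERNAME=root
DB_PASSWORD=root
DB_DATABASE=ranger_unit_tracker
```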

View File

@@ -15,13 +15,32 @@ import { logger } from '../services/logging/logger';
 const querystring = require('querystring');
 import { performance } from 'perf_hooks';
 import { CacheService } from '../services/cache/cache';
+import { Strategy as CustomStrategy } from 'passport-custom';
 function parseJwt(token) {
     return JSON.parse(Buffer.from(token.split('.')[1], 'base64').toString());
 }
-passport.use(new OpenIDConnectStrategy({
+const devLogin = (req: any, res: any, next: any) => {
+    // The object here must match what your 'verify' function returns: { memberId }
+    const devUser = { memberId: 1 }; // Hardcoded ID
+    req.logIn(devUser, (err: any) => {
+        if (err) return next(err);
+        const redirectTo = req.session.redirectTo || process.env.CLIENT_URL;
+        delete req.session.redirectTo;
+        return res.redirect(redirectTo);
+    });
+};
+if (process.env.AUTH_MODE === "mock") {
+    passport.use('mock', new CustomStrategy(async (req, done) => {
+        const mockUser = { memberId: 1 };
+        return done(null, mockUser);
+    }))
+} else {
+    passport.use('oidc', new OpenIDConnectStrategy({
     issuer: process.env.AUTH_ISSUER,
     authorizationURL: process.env.AUTH_DOMAIN + '/authorize/',
     tokenURL: process.env.AUTH_DOMAIN + '/token/',
@@ -30,7 +49,7 @@ passport.use(new OpenIDConnectStrategy({
     clientSecret: process.env.AUTH_CLIENT_SECRET,
     callbackURL: process.env.AUTH_REDIRECT_URI,
     scope: ['openid', 'profile', 'discord']
 }, async function verify(issuer, sub, profile, jwtClaims, accessToken, refreshToken, params, cb) {
     // console.log('--- OIDC verify() called ---');
     // console.log('issuer:', issuer);
@@ -123,19 +142,29 @@ passport.use(new OpenIDConnectStrategy({
     } finally {
         if (con) con.release();
     }
 }));
+}
 router.get('/login', (req, res, next) => {
-    // Store redirect target in session if provided
-    req.session.redirectTo = req.query.redirect;
-    next();
-}, passport.authenticate('openidconnect'));
+    req.session.redirectTo = req.query.redirect as string;
+    const strategy = process.env.AUTH_MODE === 'mock' ? 'mock' : 'oidc';
+    passport.authenticate(strategy, {
+        successRedirect: (req.session.redirectTo || process.env.CLIENT_URL),
+        failureRedirect: '/login'
+    })(req, res, next);
+});
 router.get('/callback', (req, res, next) => {
+    //escape if mocked
+    if (process.env.AUTH_MODE === 'mock') {
+        return res.redirect(process.env.CLIENT_URL || '/');
+    }
     const redirectURI = req.session.redirectTo;
-    passport.authenticate('openidconnect', (err, user) => {
+    passport.authenticate('oidc', (err, user) => {
         if (err) return next(err);
         if (!user) return res.redirect(process.env.CLIENT_URL);
@@ -165,12 +194,21 @@ router.get('/logout', [requireLogin], function (req, res, next) {
         sameSite: 'lax'
     });
+    if (process.env.AUTH_MODE === 'mock') {
+        return res.redirect(process.env.CLIENT_URL || '/');
+    }
     var params = {
         client_id: process.env.AUTH_CLIENT_ID,
         returnTo: process.env.CLIENT_URL
     };
-    res.redirect(process.env.AUTH_END_SESSION_URI + '?' + querystring.stringify(params));
+    const endSessionUri = process.env.AUTH_END_SESSION_URI;
+    if (endSessionUri) {
+        return res.redirect(endSessionUri + '?' + querystring.stringify(params));
+    } else {
+        return res.redirect(process.env.CLIENT_URL || '/');
+    }
     })
 });
 });
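The mock strategy hands passport a bare `{ memberId: 1 }` object, so the session serializers that `req.logIn` relies on (not shown in this diff) must accept that shape. A minimal sketch of what such a pair typically looks like, purely as an assumption about the surrounding code:

```
import passport from 'passport'; // already imported by this module

// Hypothetical sketch; the repo's real serializers are not shown in this diff.
passport.serializeUser((user, done) => {
    // Persist only the memberId in the session store.
    done(null, user.memberId);
});

passport.deserializeUser((memberId, done) => {
    // Rebuild the minimal { memberId } object the route handlers expect.
    done(null, { memberId });
});
```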

View File

@@ -196,9 +196,8 @@ export async function getMembersFull(ids: number[]): Promise<MemberCardDetails[]
         status_date: row.status_date,
         loa_until: row.loa_until ? new Date(row.loa_until) : undefined,
     };
-    // roles comes as array of strings; parse each one
-    const roles: Role[] = JSON.parse(row.roles).map((r: string) => JSON.parse(r));
+    const roles: Role[] = row.roles;
     return { member, roles };
 });

13
docker-compose.dev.yml Normal file
View File

@@ -0,0 +1,13 @@
version: "3.9"
services:
  db:
    image: mariadb:10.6.23-ubi9
    environment:
      MARIADB_ROOT_PASSWORD: root
      MARIADB_DATABASE: ranger_unit_tracker
      MARIADB_USER: dev
      MARIADB_PASSWORD: dev
    ports:
      - "3306:3306"
    volumes:
      - ./db_data:/var/lib/mysql

54
readme.md Normal file
View File

@@ -0,0 +1,54 @@
## Prerequisites
* Node.js
* npm
* Docker + Docker Compose
## Installation
Install dependencies in each workspace:
```
cd ui && npm install
cd ../api && npm install
cd ../shared && npm install
```
## Local Development Setup
From the project root, start required services:
```
docker compose -f docker-compose.dev.yml up
```
Run database setup from `/api`:
```
npm run migrate:up
npm run migrate:seed
```
## Running the App
Start the frontend:
```
cd ui
npm run dev
```
Start the API:
```
cd api
npm run dev
```
* UI runs via Vite
* API runs on Node after TypeScript build
## Notes
* `shared` must have its dependencies installed for both UI and API to work
* `docker-compose.dev.yml` is required for local dev dependencies (e.g. database)