devcontainers #176

Merged
hizumi merged 20 commits from devcontainers into main 2026-02-03 20:16:13 -06:00
15 changed files with 114809 additions and 112 deletions

4
.gitignore vendored
View File

@@ -31,4 +31,6 @@ coverage
*.sql
.env
*.db
*.db
db_data

4
api/.gitignore vendored
View File

@@ -1 +1,3 @@
built
built
!migrations/*/*.sql

20
api/database.json Normal file
View File

@@ -0,0 +1,20 @@
{
"dev": {
"driver": "mysql",
"user": "root",
"password": "root",
"host": "localhost",
"database": "ranger_unit_tracker",
"port": "3306",
"multipleStatements": true
},
"prod": {
"driver": "mysql",
"user": {"ENV" : "DB_USERNAME"},
"password": {"ENV" : "DB_PASSWORD"},
"host": {"ENV" : "DB_HOST"},
"database": {"ENV" : "DB_DATABASE"},
"port": {"ENV" : "DB_PORT"},
"multipleStatements": true
}
}

View File

@@ -0,0 +1,53 @@
'use strict';
var dbm;
var type;
var seed;
var fs = require('fs');
var path = require('path');
var Promise;
/**
* We receive the dbmigrate dependency from dbmigrate initially.
* This enables us to not have to rely on NODE_PATH.
*/
exports.setup = function(options, seedLink) {
dbm = options.dbmigrate;
type = dbm.dataType;
seed = seedLink;
Promise = options.Promise;
};
exports.up = function(db) {
var filePath = path.join(__dirname, 'sqls', '20260201154439-initial-up.sql');
return new Promise( function( resolve, reject ) {
fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
if (err) return reject(err);
console.log('received data: ' + data);
resolve(data);
});
})
.then(function(data) {
return db.runSql(data);
});
};
exports.down = function(db) {
var filePath = path.join(__dirname, 'sqls', '20260201154439-initial-down.sql');
return new Promise( function( resolve, reject ) {
fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
if (err) return reject(err);
console.log('received data: ' + data);
resolve(data);
});
})
.then(function(data) {
return db.runSql(data);
});
};
exports._meta = {
"version": 1
};

112185
api/migrations/seed.sql Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1 @@
/* Replace with your SQL commands */

File diff suppressed because it is too large Load Diff

880
api/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -9,27 +9,34 @@
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsc && tsc-alias && node ./built/api/src/index.js",
"build": "tsc && tsc-alias"
"prod": "tsc && tsc-alias && node ./built/api/src/index.js",
"build": "tsc && tsc-alias",
"seed": "node ./scripts/seed.js"
},
"dependencies": {
"@rsol/hashmig": "^1.0.7",
"@sentry/node": "^10.27.0",
"@types/express-session": "^1.18.2",
"connect-sqlite3": "^0.9.16",
"cors": "^2.8.5",
"dotenv": "^17.2.1",
"db-migrate": "^0.11.14",
"db-migrate-mysql": "^3.0.0",
"dotenv": "16.6.1",
"express": "^5.1.0",
"express-session": "^1.18.2",
"mariadb": "^3.4.5",
"morgan": "^1.10.1",
"mysql2": "^3.14.3",
"passport": "^0.7.0",
"passport-custom": "^1.1.1",
"passport-openidconnect": "^0.1.2"
},
"devDependencies": {
"@types/express": "^5.0.3",
"@types/morgan": "^1.9.10",
"@types/node": "^24.8.1",
"cross-env": "^10.1.0",
"tsc-alias": "^1.8.16",
"typescript": "^5.9.3"
}
}
}

29
api/scripts/migrate.js Normal file
View File

@@ -0,0 +1,29 @@
// Runs golang-migrate via Docker against the DB configured in .env.
// Usage: node scripts/migrate.js <migrate args>, e.g. "up" or "down 1".
const dotenv = require('dotenv');
const path = require('path');
const { execSync } = require('child_process');

dotenv.config({ path: path.resolve(process.cwd(), `.env`) });

const db = {
  user: process.env.DB_USERNAME,
  pass: process.env.DB_PASSWORD,
  host: process.env.DB_MIGRATION_HOST,
  port: process.env.DB_PORT,
  name: process.env.DB_DATABASE,
};

// URL-encode the credentials so characters like '@', ':' or '/' in the
// password cannot break the DSN (golang-migrate URL-decodes them).
const user = encodeURIComponent(db.user ?? '');
const pass = encodeURIComponent(db.pass ?? '');

// Extra CLI arguments are forwarded verbatim to migrate/migrate.
const args = process.argv.slice(2).join(" ");
const migrations = path.join(process.cwd(), "migrations");

// Build the docker command for a given DSN so we can log a redacted copy.
const buildCmd = (dsn) => [
  "docker run --rm",
  `-v "${migrations}:/migrations"`,
  "migrate/migrate",
  "-path=/migrations",
  `-database "${dsn}"`,
  args,
].join(" ");

// Log with the password redacted — never echo credentials to the console.
console.log(buildCmd(`mysql://${user}:****@tcp(${db.host}:${db.port})/${db.name}`));
execSync(
  buildCmd(`mysql://${user}:${pass}@tcp(${db.host}:${db.port})/${db.name}`),
  { stdio: "inherit" }
);

33
api/scripts/seed.js Normal file
View File

@@ -0,0 +1,33 @@
// Loads migrations/seed.sql into the dev database configured in .env.
const dotenv = require("dotenv");
const path = require("path");
const mariadb = require("mariadb");
const fs = require("fs");

dotenv.config({ path: path.resolve(process.cwd(), `.env`) });

const { DB_HOST, DB_PORT, DB_USERNAME, DB_PASSWORD, DB_DATABASE, APPLICATION_ENVIRONMENT } = process.env;

// Safety guard: do not accidentally seed prod.
if (APPLICATION_ENVIRONMENT !== "dev") {
  console.log("PLEASE DO NOT SEED PROD!!!!");
  process.exit(0);
}

(async () => {
  let conn;
  try {
    conn = await mariadb.createConnection({
      host: DB_HOST,
      // mariadb expects a numeric port; fall back to the driver default
      // when DB_PORT is unset rather than passing NaN.
      port: DB_PORT ? Number(DB_PORT) : undefined,
      user: DB_USERNAME,
      password: DB_PASSWORD,
      database: DB_DATABASE,
      // seed.sql is one file containing many statements.
      multipleStatements: true,
    });
    const seedFile = path.join(process.cwd(), "migrations", "seed.sql");
    const sql = fs.readFileSync(seedFile, "utf8");
    await conn.query(sql);
    console.log("Seeded");
  } catch (err) {
    // Surface failures and exit non-zero instead of dying with an
    // unhandled promise rejection (original IIFE had no catch).
    console.error("Seeding failed:", err);
    process.exitCode = 1;
  } finally {
    // Always close the connection, even when query() throws.
    if (conn) await conn.end();
  }
})();

View File

@@ -15,127 +15,156 @@ import { logger } from '../services/logging/logger';
const querystring = require('querystring');
import { performance } from 'perf_hooks';
import { CacheService } from '../services/cache/cache';
import { Strategy as CustomStrategy } from 'passport-custom';
function parseJwt(token) {
return JSON.parse(Buffer.from(token.split('.')[1], 'base64').toString());
}
passport.use(new OpenIDConnectStrategy({
issuer: process.env.AUTH_ISSUER,
authorizationURL: process.env.AUTH_DOMAIN + '/authorize/',
tokenURL: process.env.AUTH_DOMAIN + '/token/',
userInfoURL: process.env.AUTH_DOMAIN + '/userinfo/',
clientID: process.env.AUTH_CLIENT_ID,
clientSecret: process.env.AUTH_CLIENT_SECRET,
callbackURL: process.env.AUTH_REDIRECT_URI,
scope: ['openid', 'profile', 'discord']
}, async function verify(issuer, sub, profile, jwtClaims, accessToken, refreshToken, params, cb) {
const devLogin = (req: any, res: any, next: any) => {
// The object here must match what your 'verify' function returns: { memberId }
const devUser = { memberId: 1 }; // Hardcoded ID
// console.log('--- OIDC verify() called ---');
// console.log('issuer:', issuer);
// console.log('sub:', sub);
// // console.log('discord:', discord);
// console.log('profile:', profile);
// console.log('jwt: ', parseJwt(jwtClaims));
// console.log('params:', params);
let con;
req.logIn(devUser, (err: any) => {
if (err) return next(err);
const redirectTo = req.session.redirectTo || process.env.CLIENT_URL;
delete req.session.redirectTo;
return res.redirect(redirectTo);
});
};
try {
con = await pool.getConnection();
if (process.env.AUTH_MODE === "mock") {
passport.use('mock', new CustomStrategy(async (req, done) => {
const mockUser = { memberId: 1 };
return done(null, mockUser);
}))
} else {
passport.use('oidc', new OpenIDConnectStrategy({
issuer: process.env.AUTH_ISSUER,
authorizationURL: process.env.AUTH_DOMAIN + '/authorize/',
tokenURL: process.env.AUTH_DOMAIN + '/token/',
userInfoURL: process.env.AUTH_DOMAIN + '/userinfo/',
clientID: process.env.AUTH_CLIENT_ID,
clientSecret: process.env.AUTH_CLIENT_SECRET,
callbackURL: process.env.AUTH_REDIRECT_URI,
scope: ['openid', 'profile', 'discord']
}, async function verify(issuer, sub, profile, jwtClaims, accessToken, refreshToken, params, cb) {
await con.beginTransaction();
// console.log('--- OIDC verify() called ---');
// console.log('issuer:', issuer);
// console.log('sub:', sub);
// // console.log('discord:', discord);
// console.log('profile:', profile);
// console.log('jwt: ', parseJwt(jwtClaims));
// console.log('params:', params);
let con;
//lookup existing user
const existing = await con.query(`SELECT id FROM members WHERE authentik_issuer = ? AND authentik_sub = ? LIMIT 1;`, [issuer, sub]);
let memberId: number | null = null;
//if member exists
if (existing.length > 0) {
//login
memberId = existing[0].id;
logger.info('auth', `Existing member login`, {
memberId,
issuer,
});
try {
con = await pool.getConnection();
} else {
//otherwise: create account mode
const jwt = parseJwt(jwtClaims);
const discordID = jwt.discord?.id as number;
await con.beginTransaction();
//check if account is available to claim
if (discordID)
memberId = await mapDiscordtoID(discordID);
if (discordID && memberId) {
// claim account
const result = await con.query(
`UPDATE members SET authentik_sub = ?, authentik_issuer = ? WHERE id = ?;`,
[sub, issuer, memberId]
)
logger.info('auth', `Existing member claimed via Discord`, {
//lookup existing user
const existing = await con.query(`SELECT id FROM members WHERE authentik_issuer = ? AND authentik_sub = ? LIMIT 1;`, [issuer, sub]);
let memberId: number | null = null;
//if member exists
if (existing.length > 0) {
//login
memberId = existing[0].id;
logger.info('auth', `Existing member login`, {
memberId,
discordID,
issuer,
});
} else {
// new account
const username = sub.username;
const result = await con.query(
`INSERT INTO members (name, authentik_sub, authentik_issuer) VALUES (?, ?, ?)`,
[username, sub, issuer]
)
memberId = Number(result.insertId);
//otherwise: create account mode
const jwt = parseJwt(jwtClaims);
const discordID = jwt.discord?.id as number;
logger.info('auth', `New member account created`, {
memberId,
username,
issuer,
});
//check if account is available to claim
if (discordID)
memberId = await mapDiscordtoID(discordID);
if (discordID && memberId) {
// claim account
const result = await con.query(
`UPDATE members SET authentik_sub = ?, authentik_issuer = ? WHERE id = ?;`,
[sub, issuer, memberId]
)
logger.info('auth', `Existing member claimed via Discord`, {
memberId,
discordID,
issuer,
});
} else {
// new account
const username = sub.username;
const result = await con.query(
`INSERT INTO members (name, authentik_sub, authentik_issuer) VALUES (?, ?, ?)`,
[username, sub, issuer]
)
memberId = Number(result.insertId);
logger.info('auth', `New member account created`, {
memberId,
username,
issuer,
});
}
}
}
await con.query(`UPDATE members SET last_login = ? WHERE id = ?`, [toDateTime(new Date()), memberId])
await con.query(`UPDATE members SET last_login = ? WHERE id = ?`, [toDateTime(new Date()), memberId])
await con.commit();
return cb(null, { memberId });
} catch (error) {
logger.error('auth', `Authentication transaction failed`, {
issuer,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
});
await con.commit();
return cb(null, { memberId });
} catch (error) {
logger.error('auth', `Authentication transaction failed`, {
issuer,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
});
if (con) {
try {
await con.rollback();
} catch (rollbackError) {
logger.error('auth', `Rollback failed`, {
error: rollbackError instanceof Error
? rollbackError.message
: String(rollbackError),
});
if (con) {
try {
await con.rollback();
} catch (rollbackError) {
logger.error('auth', `Rollback failed`, {
error: rollbackError instanceof Error
? rollbackError.message
: String(rollbackError),
});
}
}
return cb(error);
} finally {
if (con) con.release();
}
return cb(error);
} finally {
if (con) con.release();
}
}));
}));
}
router.get('/login', (req, res, next) => {
// Store redirect target in session if provided
req.session.redirectTo = req.query.redirect;
req.session.redirectTo = req.query.redirect as string;
next();
}, passport.authenticate('openidconnect'));
const strategy = process.env.AUTH_MODE === 'mock' ? 'mock' : 'oidc';
passport.authenticate(strategy, {
successRedirect: (req.session.redirectTo || process.env.CLIENT_URL),
failureRedirect: '/login'
})(req, res, next);
});
router.get('/callback', (req, res, next) => {
//escape if mocked
if (process.env.AUTH_MODE === 'mock') {
return res.redirect(process.env.CLIENT_URL || '/');
}
const redirectURI = req.session.redirectTo;
passport.authenticate('openidconnect', (err, user) => {
passport.authenticate('oidc', (err, user) => {
if (err) return next(err);
if (!user) return res.redirect(process.env.CLIENT_URL);
@@ -165,12 +194,21 @@ router.get('/logout', [requireLogin], function (req, res, next) {
sameSite: 'lax'
});
if (process.env.AUTH_MODE === 'mock') {
return res.redirect(process.env.CLIENT_URL || '/');
}
var params = {
client_id: process.env.AUTH_CLIENT_ID,
returnTo: process.env.CLIENT_URL
};
res.redirect(process.env.AUTH_END_SESSION_URI + '?' + querystring.stringify(params));
const endSessionUri = process.env.AUTH_END_SESSION_URI;
if (endSessionUri) {
return res.redirect(endSessionUri + '?' + querystring.stringify(params));
} else {
return res.redirect(process.env.CLIENT_URL || '/');
}
})
});
});

View File

@@ -196,9 +196,8 @@ export async function getMembersFull(ids: number[]): Promise<MemberCardDetails[]
status_date: row.status_date,
loa_until: row.loa_until ? new Date(row.loa_until) : undefined,
};
// roles comes as array of strings; parse each one
const roles: Role[] = JSON.parse(row.roles).map((r: string) => JSON.parse(r));
const roles: Role[] = row.roles;
return { member, roles };
});

13
docker-compose.dev.yml Normal file
View File

@@ -0,0 +1,13 @@
# Local development dependencies only (see readme.md).
# NOTE: the top-level `version` key is obsolete in Compose v2 and was removed.
services:
  db:
    image: mariadb:10.6.23-ubi9
    environment:
      # Dev-only credentials — must match api/database.json "dev" profile.
      MARIADB_ROOT_PASSWORD: root
      MARIADB_DATABASE: ranger_unit_tracker
      MARIADB_USER: dev
      MARIADB_PASSWORD: dev
    ports:
      - "3306:3306"
    volumes:
      # Persist data across restarts; db_data is gitignored.
      - ./db_data:/var/lib/mysql

54
readme.md Normal file
View File

@@ -0,0 +1,54 @@
## Prerequisites
* Node.js
* npm
* Docker + Docker Compose
## Installation
Install dependencies in each workspace:
```
cd ui && npm install
cd ../api && npm install
cd ../shared && npm install
```
## Local Development Setup
From the project root, start required services:
```
docker compose -f docker-compose.dev.yml up
```
Run database setup from `/api`:
```
npm run migrate:up
npm run seed
```
## Running the App
Start the frontend:
```
cd ui
npm run dev
```
Start the API:
```
cd api
npm run dev
```
* UI runs via Vite
* API runs on Node after TypeScript build
## Notes
* `shared` must have its dependencies installed for both UI and API to work
* `docker-compose.dev.yml` is required for local dev dependencies (e.g. database)