5 Commits

Author SHA1 Message Date
97bda2c8a8 working python 2023-04-07 01:40:42 -07:00
deb145a7ce generally working with Python/Pythia 2023-04-07 01:30:54 -07:00
cc6f6b911c move credentials to settings.json, use RVExtensionArgs call
adds value type definitions for float/int, adds mission, server name, world name to sent metrics
2023-04-04 15:18:44 -07:00
6044126e86 remove extension files (sep repo) 2023-04-02 13:43:20 -07:00
d41cb543d7 adjust to include worldName/serverName as tag values, missionName field 2023-04-02 13:16:08 -07:00
17 changed files with 823 additions and 141 deletions

11
.gitignore vendored
View File

@@ -1,2 +1,11 @@
*.pbo
*.bak
*.bak
*.dll
*.so
extension/RangerMetrics.h
extension/RangerMetrics_x64.h
\@RangerMetrics/settings.json

View File

@@ -4,7 +4,7 @@ class CfgPatches {
weapons[] = {};
requiredVersion = 0.1;
requiredAddons[] = {};
author[] = {"EagleTrooper and Gary"};
author[] = {"EagleTrooper","Gary","IndigoFox"};
authorUrl = "http://example.com";
};
};
@@ -14,9 +14,11 @@ class CfgFunctions {
class Common {
file = "\RangerMetrics\functions";
class postInit { postInit = 1;};
class log {};
class gather {};
class queue {};
class send {};
class run {};
class checkResults {};
class log {};
};
};
};

View File

@@ -0,0 +1,22 @@
// RangerMetrics_fnc_checkResults
// Polls the Python extension for completion of each outstanding write thread.
// Finished or unknown threads are removed from RangerMetrics_activeThreads;
// when debug is enabled, a finished thread's return value is logged.
// NOTE: iterate over a COPY (+) of the array -- removing elements from the
// array being forEach'd would skip the element following each removal.
{
    private _threadId = _x;
    private _finished = ["RangerMetrics.influx.has_call_finished", [_threadId]] call py3_fnc_callExtension;
    // systemChat str _finished;
    if (isNil "_finished") exitWith {
        // extension missing or the call failed entirely
        RangerMetrics_activeThreads = RangerMetrics_activeThreads - [_threadId];
        [format ["[%1]: Thread %2 not found", RangerMetrics_logPrefix, _threadId], "WARN"] call RangerMetrics_fnc_log;
    };
    if (_finished isEqualTo []) exitWith {
        // extension answered but returned no status for this thread id
        RangerMetrics_activeThreads = RangerMetrics_activeThreads - [_threadId];
        [format ["[%1]: Thread %2 returned no status", RangerMetrics_logPrefix, _threadId], "WARN"] call RangerMetrics_fnc_log;
    };
    if (_finished isEqualTo true) then {
        RangerMetrics_activeThreads = RangerMetrics_activeThreads - [_threadId];
        if (missionNamespace getVariable ["RangerMetrics_debug",false]) then {
            private _return = ["RangerMetrics.influx.get_call_value", [_threadId]] call py3_fnc_callExtension;
            [format ["%1", _return], "DEBUG"] call RangerMetrics_fnc_log;
        };
    };
} forEach (+RangerMetrics_activeThreads);

View File

@@ -0,0 +1,107 @@
/* ----------------------------------------------------------------------------
Function: CBA_fnc_encodeJSON
Description:
    Serializes input to a JSON string. Can handle
    - ARRAY
    - BOOL
    - CONTROL
    - GROUP
    - LOCATION
    - NAMESPACE
    - NIL (ANY)
    - NUMBER
    - OBJECT
    - STRING
    - TASK
    - TEAM_MEMBER
    - HASHMAP
    - Everything else will simply be stringified.
Parameters:
    _object - Object to serialize. <ARRAY, ...>
Returns:
    _json - JSON string containing serialized object.
Examples:
    (begin example)
        private _settings = call CBA_fnc_createNamespace;
        _settings setVariable ["enabled", true];
        private _json = [_settings] call CBA_fnc_encodeJSON;
    (end)
Author:
    BaerMitUmlaut
---------------------------------------------------------------------------- */
params ["_object"];

// nil serializes to the JSON null literal
if (isNil "_object") exitWith { "null" };

switch (typeName _object) do {
    // numbers and booleans already stringify to valid JSON literals
    case "SCALAR";
    case "BOOL": {
        str _object;
    };
    case "STRING": {
        // escape JSON control/special characters; note SQF doubles quote
        // marks inside string literals, so """" is a one-character string: "
        {
            _object = [_object, _x#0, _x#1] call CBA_fnc_replace;
        } forEach [
            ["\", "\\"],
            ["""", "\"""],
            [toString [8], "\b"],
            [toString [12], "\f"],
            [endl, "\n"],
            [toString [10], "\n"],
            [toString [13], "\r"],
            [toString [9], "\t"]
        ];
        // Stringify without escaping inter string quote marks.
        """" + _object + """"
    };
    case "ARRAY": {
        // CBA hashes are specially-structured arrays; encode them as JSON objects
        if ([_object] call CBA_fnc_isHash) then {
            private _json = (([_object] call CBA_fnc_hashKeys) apply {
                private _name = _x;
                private _value = [_object, _name] call CBA_fnc_hashGet;
                format ["%1: %2", [_name] call CBA_fnc_encodeJSON, [_value] call CBA_fnc_encodeJSON]
            }) joinString ", ";
            "{" + _json + "}"
        } else {
            // plain arrays encode element-wise, recursively
            private _json = (_object apply {[_x] call CBA_fnc_encodeJSON}) joinString ", ";
            "[" + _json + "]"
        };
    };
    case "HASHMAP": {
        private _json = ((_object toArray false) apply {
            _x params ["_key", ["_value", objNull]];
            // JSON object keys must be strings; stringify non-string keys first
            if !(_key isEqualType "") then {
                _key = str _key;
            };
            format ["%1: %2", [_key] call CBA_fnc_encodeJSON, [_value] call CBA_fnc_encodeJSON]
        }) joinString ", ";
        "{" + _json + "}"
    };
    default {
        // types without a variable namespace (SIDE, CONFIG, CODE, ...) are
        // simply stringified and encoded as JSON strings
        if !(typeName _object in (supportInfo "u:allVariables*" apply {_x splitString " " select 1})) exitWith {
            [str _object] call CBA_fnc_encodeJSON
        };
        if (isNull _object) exitWith { "null" };
        // namespace-like values (object, group, namespace, ...) encode their
        // variables as a JSON object, recursively
        private _json = ((allVariables _object) apply {
            private _name = _x;
            private _value = _object getVariable [_name, objNull];
            format ["%1: %2", [_name] call CBA_fnc_encodeJSON, [_value] call CBA_fnc_encodeJSON]
        }) joinString ", ";
        "{" + _json + "}"
    };
};

View File

@@ -0,0 +1,83 @@
// function adapted from YAINA by MartinCo at http://yaina.eu
/*
    RangerMetrics_fnc_gather
    Collects one sample of server/mission metrics and pushes each metric into
    the send queue via RangerMetrics_fnc_queue. Called periodically from
    postInit (CBA per-frame handler, or a spawn/sleep loop without CBA).

    Parameters:
    _cba - whether CBA is loaded; enables the per-frame-handler count <BOOL>
*/
params [["_cba",false,[true]]];

// do nothing when collection is disabled (e.g. extension failed to load)
if (missionNamespace getVariable ["RangerMetrics_run",false]) then {
    private _startTime = diag_tickTime;

    // Mission name, from every available source
    ["server", "mission_name", [["source", "onLoadName"]], nil, "string", getMissionConfigValue ["onLoadName", ""]] call RangerMetrics_fnc_queue;
    ["server", "mission_name", [["source", "missionName"]], nil, "string", missionName] call RangerMetrics_fnc_queue;
    ["server", "mission_name", [["source", "missionNameSource"]], nil, "string", missionNameSource] call RangerMetrics_fnc_queue;
    ["server", "mission_name", [["source", "briefingName"]], nil, "string", briefingName] call RangerMetrics_fnc_queue;
    ["server", "server_uptime", nil, nil, "float", diag_tickTime toFixed 2] call RangerMetrics_fnc_queue;

    // Number of local entities
    ["simulation", "entity_count", [["entity_type", "unit"], ["only_local", true]], nil, "int", { local _x } count allUnits] call RangerMetrics_fnc_queue;
    ["simulation", "entity_count", [["entity_type", "group"], ["only_local", true]], nil, "int", { local _x } count allGroups] call RangerMetrics_fnc_queue;
    // tag value "vehicle" (singular) to match the global entity_count tags below
    ["simulation", "entity_count", [["entity_type", "vehicle"], ["only_local", true]], nil, "int", { local _x} count vehicles] call RangerMetrics_fnc_queue;

    // Server stats
    ["simulation", "fps", [["metric", "avg"]], nil, "float", diag_fps toFixed 2] call RangerMetrics_fnc_queue;
    ["simulation", "fps", [["metric", "avg_min"]], nil, "float", diag_fpsMin toFixed 2] call RangerMetrics_fnc_queue;
    ["simulation", "mission_time", nil, nil, "float", time toFixed 2] call RangerMetrics_fnc_queue;

    // Script counts per execution type
    private _activeScripts = diag_activeScripts;
    ["simulation", "script_count", [["execution", "spawn"]], nil, "int", _activeScripts select 0] call RangerMetrics_fnc_queue;
    ["simulation", "script_count", [["execution", "execVM"]], nil, "int", _activeScripts select 1] call RangerMetrics_fnc_queue;
    ["simulation", "script_count", [["execution", "exec"]], nil, "int", _activeScripts select 2] call RangerMetrics_fnc_queue;
    ["simulation", "script_count", [["execution", "execFSM"]], nil, "int", _activeScripts select 3] call RangerMetrics_fnc_queue;
    private _pfhCount = if(_cba) then {count CBA_common_perFrameHandlerArray} else {0};
    ["simulation", "script_count", [["execution", "pfh"]], nil, "int", _pfhCount] call RangerMetrics_fnc_queue;

    // Network-wide (global) counts, gathered on the server only
    if (isServer) then {
        ["simulation", "entity_count", [["entity_type", "unit"], ["only_local", false]], nil, "int", count allUnits] call RangerMetrics_fnc_queue;
        ["simulation", "entity_count", [["entity_type", "group"], ["only_local", false]], nil, "int", count allGroups] call RangerMetrics_fnc_queue;
        ["simulation", "entity_count", [["entity_type", "vehicle"], ["only_local", false]], nil, "int", count vehicles] call RangerMetrics_fnc_queue;
        ["simulation", "entity_count", [["entity_type", "player"], ["only_local", false]], nil, "int", count allPlayers] call RangerMetrics_fnc_queue;
    };

    // Headless client FPS: run the inner block on each HC, which remoteExecs
    // the queue call back to the server (machine id 2)
    private _headlessClients = entities "HeadlessClient_F";
    {
        {
            private _stats_fps = diag_fps toFixed 2;
            private _stats_fps_min = diag_fpsMin toFixed 2;
            ["simulation", "fps_hc", [["metric", "avg"]], nil, "float", _stats_fps] remoteExec ["RangerMetrics_fnc_queue", 2];
            ["simulation", "fps_hc", [["metric", "avg_min"]], nil, "float", _stats_fps_min] remoteExec ["RangerMetrics_fnc_queue", 2];
        } remoteExecCall ["bis_fnc_call", owner _x];
    } foreach _headlessClients;

    /** WORKING HEADLESS CODE COMMENTED OUT TO TRY SOMETHING DIFFERENT
    // Headless Clients FPS
    // Thanks to CPL.Brostrom.A
    private _headlessClients = entities "HeadlessClient_F";
    {
        {
            private _stats_fps = round diag_fps;
            ["stats.HCfps", _stats_fps] remoteExec ["RangerMetrics_fnc_queue", 2];
        } remoteExecCall ["bis_fnc_call", owner _x];
    } foreach _headlessClients;
    */

    // log the runtime; only when debug is enabled so it doesn't flood the log
    if(missionNamespace getVariable ["RangerMetrics_debug",false]) then {
        [format ["Run time: %1", diag_tickTime - _startTime], "DEBUG"] call RangerMetrics_fnc_log;
        // missionNamespace setVariable ["RangerMetrics_debug",false];
    };
};

View File

@@ -1,5 +1,9 @@
params [["_text","Log text invalid",[""]], ["_type","INFO",[""]]];
private _textFormatted = format ["[RangerMetrics] %1: %2", _type, _text];
private _textFormatted = format [
"[%1] %2: %3",
RangerMetrics_logPrefix,
_type,
_text];
if(isServer) then {
diag_log text _textFormatted;

View File

@@ -1,23 +1,74 @@
// function adapted from YAINA by MartinCo at http://yaina.eu
if !(isServer || !hasInterface) exitWith {};
// if (!isServer) exitWith {};
_cba = (isClass(configFile >> "CfgPatches" >> "cba_main"));
RangerMetrics_logPrefix = "RangerMetrics";
RangerMetrics_debug = true;
RangerMetrics_activeThreads = [];
RangerMetrics_messageQueue = createHashMap;
[format ["Instance name: %1", profileName]] call RangerMetrics_fnc_log;
[format ["CBA detected: %1", _cba]] call RangerMetrics_fnc_log;
["Initializing v1.1"] call RangerMetrics_fnc_log;
private _settingsLoaded = ["RangerMetrics.influx.load_settings", []] call py3_fnc_callExtension;
if (isNil "_settingsLoaded") exitWith {
["Extension not found, disabling"] call RangerMetrics_fnc_log;
RangerMetrics_run = false;
};
if (_settingsLoaded isEqualTo []) then {
if (count _settingsLoaded == 0) exitWith {
["Settings not loaded, disabling"] call RangerMetrics_fnc_log;
RangerMetrics_run = false;
};
if (_settingsLoaded#0 isEqualTo 1) exitWith {
[
format["Settings not loaded, disabling. %1", _settingsLoaded#1],
"ERROR"
] call RangerMetrics_fnc_log;
RangerMetrics_run = false;
};
};
format["Settings loaded: %1", _settingsLoaded#2] call RangerMetrics_fnc_log;
RangerMetrics_settings = _settingsLoaded#2;
// RangerMetrics_settings = createHashMap;
// private _top = createHashMapFromArray _settingsLoaded#2;
// RangerMetrics_settings set [
// "influxDB",
// createHashMapFromArray (_top get "influxDB")
// ];
// RangerMetrics_settings set [
// "arma3",
// createHashMapFromArray (_top get "refreshRateMs")
// ];
["RangerMetrics.influx.connect_to_influx", []] call py3_fnc_callExtension;
RangerMetrics_run = true;
// addMissionEventHandler ["ExtensionCallback", {
// params ["_name", "_function", "_data"];
// if (_name == "RangerMetrics") then {
// [parseSimpleArray _data] call RangerMetrics_fnc_log;
// };
// }];
if(_cba) then { // CBA is running, use PFH
[RangerMetrics_fnc_run, 10, [_cba]] call CBA_fnc_addPerFrameHandler;
[{
params ["_args", "_idPFH"];
_args params [["_cba", false]];
[_cba] call RangerMetrics_fnc_gather;
call RangerMetrics_fnc_checkResults;
call RangerMetrics_fnc_send;
// }, (RangerMetrics_settings get "arma3" get "refreshRateMs"), [_cba]] call CBA_fnc_addPerFrameHandler;
}, 1, [_cba]] call CBA_fnc_addPerFrameHandler;
} else { // CBA isn't running, use sleep
[_cba] spawn {
params ["_cba"];
while{true} do {
[[_cba]] call RangerMetrics_fnc_run; // nested to match CBA PFH signature
sleep 10;
while {true} do {
[_cba] call RangerMetrics_fnc_gather; // nested to match CBA PFH signature
call RangerMetrics_fnc_checkResults;
call RangerMetrics_fnc_send;
// sleep (RangerMetrics_settings get "arma3" get "refreshRateMs");
sleep 1;
};
};
};

View File

@@ -0,0 +1,42 @@
/*
    RangerMetrics_fnc_queue
    Queues a single metric data point into RangerMetrics_messageQueue,
    grouped by destination bucket, for later batch transmission by
    RangerMetrics_fnc_send.

    Parameters:
    _bucket      - InfluxDB bucket to write to <STRING> (default: "default")
    _measurement - measurement (metric) name <STRING>
    _tags        - optional extra tags as [[key, value], ...] <ARRAY or nil>
    _fields      - optional extra fields as [[key, value], ...] <ARRAY or nil>
    _valueType   - "float" or "int"; tells the extension how to cast the value <STRING>
    _value       - the metric value <ANY>

    Returns: true
*/
params [
    ["_bucket", "default", [""]],
    "_measurement",
    ["_tags", nil, [[], nil]],
    ["_fields", nil, [[], nil]],
    "_valueType",
    "_value"
];

// point layout consumed by RangerMetrics.influx.write_influx:
// [measurement, valueType, tagPairs, fieldPairs]
private _extSend = [
    _measurement, // metric name
    _valueType, // float or int
    [ // default tags; caller-supplied _tags are appended below
        ["profile", profileName],
        ["world", toLower worldName]
    ],
    [ // default fields; caller-supplied _fields are appended below
        ["server", serverName],
        ["mission", missionName],
        ["value", _value]
    ]
];
if (!isNil "_tags") then {
    {
        (_extSend select 2) pushBack [_x#0, _x#1];
    } forEach _tags;
};
if (!isNil "_fields") then {
    {
        (_extSend select 3) pushBack [_x#0, _x#1];
    } forEach _fields;
};

// append to the per-bucket queue, creating the bucket entry on first use
(RangerMetrics_messageQueue getOrDefault [_bucket, [], true]) pushBack _extSend;
true

View File

@@ -1,92 +0,0 @@
// function adapted from YAINA by MartinCo at http://yaina.eu
// NOTE(review): legacy gatherer (removed in this commit), superseded by
// RangerMetrics_fnc_gather + RangerMetrics_fnc_queue. Sends each metric
// directly via RangerMetrics_fnc_send instead of batching through a queue.
params ["_args"];
_args params [["_cba",false,[true]]];
// skip entirely when collection is disabled
if(missionNamespace getVariable ["RangerMetrics_run",false]) then {
    private _startTime = diag_tickTime;
    // Mission Name
    // private _missionName = missionName;
    // ["missionName", _missionName] call RangerMetrics_fnc_send;
    // World Name
    // private _worldName = worldName;
    // ["worldName", _worldName] call RangerMetrics_fnc_send;
    // Server Name
    // private _serverName = serverName;
    // ["serverName", _serverName] call RangerMetrics_fnc_send;
    // Number of local units
    ["count.units", { local _x } count allUnits] call RangerMetrics_fnc_send;
    ["count.groups", { local _x } count allGroups] call RangerMetrics_fnc_send;
    ["count.vehicles", { local _x} count vehicles] call RangerMetrics_fnc_send;
    // Server Stats
    ["stats.fps", round diag_fps] call RangerMetrics_fnc_send;
    ["stats.fpsMin", round diag_fpsMin] call RangerMetrics_fnc_send;
    ["stats.uptime", round diag_tickTime] call RangerMetrics_fnc_send;
    ["stats.missionTime", round time] call RangerMetrics_fnc_send;
    // Scripts (spawn/execVM/exec/execFSM counts from diag_activeScripts)
    private _activeScripts = diag_activeScripts;
    ["scripts.spawn", _activeScripts select 0] call RangerMetrics_fnc_send;
    ["scripts.execVM", _activeScripts select 1] call RangerMetrics_fnc_send;
    ["scripts.exec", _activeScripts select 2] call RangerMetrics_fnc_send;
    ["scripts.execFSM", _activeScripts select 3] call RangerMetrics_fnc_send;
    private _pfhCount = if(_cba) then {count CBA_common_perFrameHandlerArray} else {0};
    ["scripts.pfh", _pfhCount] call RangerMetrics_fnc_send;
    // Globals if server (third argument true marks the metric as global)
    if (isServer) then {
        // Number of local units
        ["count.units", count allUnits, true] call RangerMetrics_fnc_send;
        ["count.groups", count allGroups, true] call RangerMetrics_fnc_send;
        ["count.vehicles", count vehicles, true] call RangerMetrics_fnc_send;
        ["count.players", count allPlayers, true] call RangerMetrics_fnc_send;
    };
    // run on each headless client; the HC remoteExecs back to the server (id 2)
    private _headlessClients = entities "HeadlessClient_F";
    {
        {
            private _stats_fps = round diag_fps;
            ["stats.HCfps", _stats_fps] remoteExec ["RangerMetrics_fnc_send", 2];
        } remoteExecCall ["bis_fnc_call", owner _x];
    } foreach _headlessClients;
    /** WORKING HEADLESS CODE COMMENTED OUT TO TRY SOMETHING DIFFERENT
    // Headless Clients FPS
    // Thanks to CPL.Brostrom.A
    private _headlessClients = entities "HeadlessClient_F";
    {
        {
            private _stats_fps = round diag_fps;
            ["stats.HCfps", _stats_fps] remoteExec ["RangerMetrics_fnc_send", 2];
        } remoteExecCall ["bis_fnc_call", owner _x];
    } foreach _headlessClients;
    */
    // log the runtime and switch off debug so it doesn't flood the log
    if(missionNamespace getVariable ["RangerMetrics_debug",false]) then {
        [format ["Run time: %1", diag_tickTime - _startTime], "DEBUG"] call RangerMetrics_fnc_log;
        missionNamespace setVariable ["RangerMetrics_debug",false];
    };
};

View File

@@ -1,41 +1,50 @@
params ["_metric", "_value", ["_global", false]];
private _profileName = profileName;
private _prefix = "Arma3";
private _metricPath = [format["%1,%2", _profileName, profileName], format["%1,%2", _profileName, "global"]] select _global;
// InfluDB settings
private _connection = "http://INFLUX_URL:8086";
private _token = "XXXXXXXXXXXXXXXXXXXXXXXXXXXX_AUTH_TOKEN_XXXXXXXXXXXXXXXXXXXXXXXXXXX";
private _org = "XXX_INFLUX_ORG_XXXXXX";
private _bucket = "XXX_BUCKET_NAME";
private _extSend = format["%1,%2", format["%1,%2,%3,%4,%5,%6", _connection, _token, _org, _bucket, _metricPath, _metric], _value];
if(missionNamespace getVariable ["RangerMetrics_debug",false]) then {
[format ["Sending a3influx data: %1", _extSend], "DEBUG"] call RangerMetrics_fnc_log;
};
// send the data
private _return = "a3influx" callExtension _extSend;
[{
if(missionNamespace getVariable ["RangerMetrics_debug",false]) then {
[format ["Sending a3influx data: %1", RangerMetrics_messageQueue], "DEBUG"] call RangerMetrics_fnc_log;
};
// shouldn't be possible, the extension should always return even if error
if(isNil "_return") exitWith {
[format ["return was nil (%1)", _extSend], "ERROR"] call RangerMetrics_fnc_log;
false
};
// duplicate the message queue so we can clear it before sending the data
private _extSend = + RangerMetrics_messageQueue;
RangerMetrics_messageQueue = createHashMap;
// extension error codes
if(_return in ["invalid metric value","malformed, could not find separator"] ) exitWith {
[format ["%1 (%2)", _return, _extSend], "ERROR"] call RangerMetrics_fnc_log;
false
};
{
// for each bucket, send data to extension
private _bucketName = _x;
private _bucketData = _y;
// if (true) exitWith {
[format ["bucketName: %1", _bucketName], "DEBUG"] call RangerMetrics_fnc_log;
[format ["bucketData: %1", _bucketData], "DEBUG"] call RangerMetrics_fnc_log;
// };
private _return = ["RangerMetrics.influx.write_influx", [[_bucketName, _bucketData]]] call py3_fnc_callExtension;
// success, only show if debug is set
if(missionNamespace getVariable ["RangerMetrics_debug",false]) then {
_returnArgs = _return splitString (toString [10,32]);
[format ["a3influx return data: %1",_returnArgs], "DEBUG"] call RangerMetrics_fnc_log;
};
// shouldn't be possible, the extension should always return even if error
if(isNil "_return") exitWith {
[format ["return was nil (%1)", _extSend], "ERROR"] call RangerMetrics_fnc_log;
false
};
true
if (typeName _return != "ARRAY") exitWith {
[format ["return was not an array (%1)", _extSend], "ERROR"] call RangerMetrics_fnc_log;
false
};
if (count _return == 0) exitWith {
[format ["return was empty (%1)", _extSend], "ERROR"] call RangerMetrics_fnc_log;
false
};
if (count _return == 2) exitWith {
[format ["return was error (%1)", _extSend], "ERROR"] call RangerMetrics_fnc_log;
false
};
// success, add to list of active threads
RangerMetrics_activeThreads pushBack (_return select 0);
// success, only show if debug is set
if (missionNamespace getVariable ["RangerMetrics_debug",false]) then {
[format ["a3influx threadId: %1", _return], "DEBUG"] call RangerMetrics_fnc_log;
};
} forEach _extSend;
}] call CBA_fnc_execNextFrame;

View File

@@ -0,0 +1,164 @@
/*
    Gathers mission description.ext / missionConfigFile properties and queues
    them as metric fields, grouped into measurement categories (mission info,
    respawn, player UI, corpse & wreck handling, general mission settings).
*/
// [category measurement name, [property names to read from missionConfigFile]]
private _properties = [
    ["settings_mission_info", [
        "author",
        "onLoadName",
        "onLoadMission",
        "loadScreen",
        "header",
        "onLoadIntro",
        "onLoadMissionTime",
        "onLoadIntroTime",
        "briefingName",
        "overviewPicture",
        "overviewText",
        "overviewTextLocked",
        "onBriefingGear",
        "onBriefingGroup",
        "onBriefingPlan"
    ]],
    ["settings_respawn", [
        "respawn",
        "respawnButton",
        "respawnDelay",
        "respawnVehicleDelay",
        "respawnDialog",
        "respawnOnStart",
        "respawnTemplates",
        "respawnWeapons",
        "respawnMagazines",
        "reviveMode",
        "reviveUnconsciousStateMode",
        "reviveRequiredTrait",
        "reviveRequiredItems",
        "reviveRequiredItemsFakConsumed",
        "reviveMedicSpeedMultiplier",
        "reviveDelay",
        "reviveForceRespawnDelay",
        "reviveBleedOutDelay",
        "enablePlayerAddRespawn"
    ]],
    ["settings_player_ui", [
        "overrideFeedback",
        "showHUD",
        "showCompass",
        "showGPS",
        "showGroupIndicator",
        "showMap",
        "showNotePad",
        "showPad",
        "showWatch",
        "showUAVFeed",
        "showSquadRadar"
    ]],
    ["settings_corpse_and_wreck", [
        "corpseManagerMode",
        "corpseLimit",
        "corpseRemovalMinTime",
        "corpseRemovalMaxTime",
        "wreckManagerMode",
        "wreckLimit",
        "wreckRemovalMinTime",
        "wreckRemovalMaxTime",
        "minPlayerDistance"
    ]],
    ["settings_mission_general", [
        "aiKills",
        "briefing",
        "debriefing",
        "disableChannels",
        "disabledAI",
        "disableRandomization",
        "enableDebugConsole",
        "enableItemsDropping",
        "enableTeamSwitch",
        "forceRotorLibSimulation",
        "joinUnassigned",
        "minScore",
        "avgScore",
        "maxScore",
        "onCheat",
        "onPauseScript",
        "saving",
        "scriptedPlayer",
        "skipLobby",
        "HostDoesNotSkipLobby",
        "missionGroup"
    ]
]
];

// build {category -> {property -> value}} for every property that is
// actually present in the mission config
private _propertyValues = createHashMap;
{
    private _category = _x#0;
    private _values = _x#1;
    {
        private _property = _x;
        private _value = (missionConfigFile >> _property) call BIS_fnc_getCfgData;
        // debug output only; previously this hint fired unconditionally
        // for every property on every run
        if (missionNamespace getVariable ["RangerMetrics_debug", false]) then {
            hint str [_category, _property, _value];
        };
        if (!isNil "_value") then {
            // flatten array values so they fit into a single field value
            if (_value isEqualType []) then {
                _value = _value joinString ",";
            };
            if (isNil {_propertyValues get _category}) then {
                _propertyValues set [_category, createHashMap];
            };
            _propertyValues get _category set [_property, _value];
        };
    } forEach _values;
} forEach _properties;

// queue one metric per category, carrying the gathered properties as fields
{
    private _measurementCategory = _x;
    private _fields = _y;
    ["config", _measurementCategory, nil, _fields, "int", 0] call RangerMetrics_fnc_queue;
} forEach _propertyValues;
// get all properties in missionConfigFile (recursive)
// private _nextCfgClasses = "true" configClasses (missionConfigFile);
// private _nextCfgProperties = configProperties [missionConfigFile];
// private _cfgProperties = createHashMap;
// while {count _nextCfgClasses > 0} do {
// {
// private _thisConfig = _x;
// private _thisConfigClasses = "true" configClasses _thisConfig;
// _thisCfgProperties = configProperties [_thisConfig, "!isClass _x"];
// _saveHash = createHashMap;
// {
// _propertyCfg = _x;
// _saveHash set [configName _propertyCfg, (_propertyCfg) call BIS_fnc_getCfgData];
// } forEach _thisCfgProperties;
// _hierarchy = (configHierarchy _thisConfig);
// _hierarchy deleteAt 0;
// _hierarchy = _hierarchy apply {configName _x};
// _hierarchyStr = _hierarchy joinString ".";
// _hierarchyStrParent = (_hierarchy select [0, count _hierarchy - 2]) joinString ".";
// systemChat _hierarchyStrParent;
// // if (_cfgProperties get _hierarchyStrParent == nil) then {
// // _cfgProperties set [_hierarchyStrParent, createHashMap];
// // };
// _cfgProperties set [_hierarchyStr, _saveHash];
// // _cfgProperties set [_hierarchy, _saveHash];
// _nextCfgClasses append _thisConfigClasses;
// } forEach _nextCfgClasses;
// _nextCfgClasses = _nextCfgClasses - _cfgClasses;
// };
// text ([_cfgProperties] call RangerMetrics_fnc_encodeJSON);
// iterate through _cfgProperties hashmap and queue metrics
// {
// } forEach _cfgProperties;

View File

@@ -0,0 +1 @@
RangerMetrics

View File

@@ -0,0 +1,3 @@
"""RangerMetrics Pythia package entry point.

The game calls into this package as ["RangerMetrics.influx.<fn>", ...] via
py3_fnc_callExtension, so the influx submodule must be imported here.
"""
from . import influx  # noqa: F401  -- imported for re-export, not used directly

__all__ = ["influx"]

View File

@@ -0,0 +1,190 @@
import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS
import threading
from pyproj import Transformer
from datetime import datetime
import json
import os
from .threading_utils import (
call_slow_function,
has_call_finished,
get_call_value,
THREADS,
THREAD_ID,
)
# get parent of parent directory (mod dir)
MOD_DIR = (
os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
.lstrip("\\")
.lstrip("?")
.lstrip("\\")
)
SETTINGS_FILE = ""
SETTINGS = None
DBCLIENT = None
WRITE_API = None
PROCESS_LOG = MOD_DIR + "\\rangermetrics_process.log"
ERROR_LOG = MOD_DIR + "\\rangermetrics_error.log"
DATA_LOG = MOD_DIR + "\\rangermetrics_data.log"
# TRANSFORMER = Transformer.from_crs("epsg:3857", "epsg:4326")
def get_dir():
    """Return this extension's directory for display in-game.

    Strips the Windows extended-path prefix characters ("\\\\?\\") that can
    appear on the resolved path.

    Returns a Pythia-style list: [0, "Current directory", path, PROCESS_LOG].
    """
    here = os.path.dirname(os.path.realpath(__file__))
    here = here.lstrip("\\").lstrip("?").lstrip("\\")
    return [0, "Current directory", here, PROCESS_LOG]
def load_settings():
    """Load and validate settings.json from the mod directory.

    Side effects: sets the module globals SETTINGS_FILE and SETTINGS.

    Returns a Pythia-style status list:
      [1, <error message>, ...]               on failure
      [0, "Settings loaded", <settings_out>]  on success, where settings_out is
      the settings dict converted to Arma-hashmap-style nested pair lists:
      [[key, [[subkey, subvalue], ...]], ...]
    """
    global SETTINGS_FILE, SETTINGS
    settings_path = os.path.join(MOD_DIR, "settings.json")
    # check if settings.json exists in MOD_DIR
    if not os.path.isfile(settings_path):
        return [1, "settings.json not found in mod directory", MOD_DIR]
    SETTINGS_FILE = settings_path
    # import settings from settings.json; malformed JSON must not crash the
    # game process, so report it as an error result instead
    try:
        with open(SETTINGS_FILE, "r") as f:
            SETTINGS = json.load(f)
    except (OSError, ValueError) as e:
        return [1, f"Could not parse settings.json: {e}"]
    # each entry is a [section, key] pair that must exist in the nested dict
    settings_validation = [
        ["influxdb", "host"],
        ["influxdb", "token"],
        ["influxdb", "org"],
        ["influxdb", "defaultBucket"],
        ["arma3", "refreshRateMs"],
    ]
    for section, key in settings_validation:
        if not (section in SETTINGS and key in SETTINGS[section]):
            return [1, f"Missing setting: {section} {key}"]
    # prep settings out to hashMap style list for A3
    # [[key, [subkey, subvalue], [subkey, subvalue]]]
    settings_out = []
    for key, value in SETTINGS.items():
        if isinstance(value, dict):
            settings_out.append([key, [[sk, sv] for sk, sv in value.items()]])
        else:
            settings_out.append([key, value])
    return [0, "Settings loaded", settings_out]
def connect_to_influx():
    """Create the module-level InfluxDB client and synchronous write API.

    Reads connection details from the SETTINGS global, so load_settings()
    must have been called successfully first.

    Returns a Pythia-style status list: [0, msg] on success, [1, msg] on error.
    """
    global DBCLIENT
    DBCLIENT = influxdb_client.InfluxDBClient(
        url=SETTINGS["influxdb"]["host"],
        token=SETTINGS["influxdb"]["token"],
        org=SETTINGS["influxdb"]["org"],
        enable_gzip=True,
    )
    # NOTE(review): the client constructor does not return None on failure, so
    # this check is effectively dead; connection errors surface on first write
    if DBCLIENT is None:
        return [1, "Error connecting to InfluxDB"]
    global WRITE_API
    # SYNCHRONOUS write mode: write() blocks until the points are sent
    WRITE_API = DBCLIENT.write_api(write_options=SYNCHRONOUS)
    if WRITE_API is None:
        return [1, "Error connecting to InfluxDB"]
    return [0, "Connected to InfluxDB"]
def test_data(data):
    """Debug helper: dump one raw data point to the data log and echo it back.

    `data` is expected as [measurement, tag_set, field_set, position] where
    tag_set / field_set are [[key, value], ...] pair lists -- TODO confirm
    against the in-game caller.

    Returns [data, dict(tag_set)].
    """
    # use the module-level DATA_LOG path (inside the mod dir) so this dump
    # lands next to the process/error logs instead of the game's working dir
    with open(DATA_LOG, "a") as f:
        f.write(str(data) + "\n")
        f.write(f"{datetime.now()}: {data[2]}\n")
        # convert to dict from list of key, value pairs
        # format [[key, value], [key, value]] to {key: value, key: value}
        measurement, tag_set, field_set, position = data
        tag_dict = dict(tag_set)
        field_dict = dict(field_set)
        f.write(
            f"{datetime.now()}: {measurement}, {json.dumps(tag_dict, indent=2)}, {json.dumps(field_dict, indent=2)}, {position}\n"
        )
    return [data, dict(data[1])]
def log_process(line):
    """Append a timestamped line to the process log. Always returns True."""
    with open(PROCESS_LOG, "a+") as handle:
        print(f"{datetime.now()}: {line}", file=handle)
    return True
def log_error(line):
    """Append a timestamped line to the error log. Always returns True."""
    with open(ERROR_LOG, "a+") as handle:
        print(f"{datetime.now()}: {line}", file=handle)
    return True
def write_influx(data):
    """Kick off an InfluxDB write on a background thread.

    Runs write_influx_async(data) via call_slow_function so the game is not
    blocked by network I/O. Returns [thread_id]; the game later polls
    has_call_finished(thread_id) and fetches the result with get_call_value().
    """
    # thread the write to influxdb
    thread_id = call_slow_function(write_influx_async, (data,))
    return [thread_id]
def write_influx_async(data):
    """Write one batch of queued points to InfluxDB (runs on a worker thread).

    `data` is [bucket_name, points], where each point is
    [measurement, value_type, tag_pairs, field_pairs] and tag_pairs /
    field_pairs are [[key, value], ...] lists sent from the game.

    Returns a Pythia-style status list: [0, msg] on success, [1, msg] on error.
    """
    # an empty bucket name falls back to the configured default bucket
    target_bucket = data[0] or SETTINGS["influxdb"]["defaultBucket"]
    points = data[1]
    log_process(f"Writing to bucket {target_bucket}")
    # log the number of points, not len(data) (which is always 2)
    log_process(f"Processing {len(points)} data points")
    processed = []
    for measurement, value_type, tag_set, field_set in points:
        tag_dict = dict(tag_set)
        field_dict = dict(field_set)
        # Arma sends every value as a string; cast per the declared type so
        # InfluxDB stores a numeric field instead of a string
        if value_type == "int":
            field_dict["value"] = int(field_dict["value"])
        elif value_type == "float":
            field_dict["value"] = float(field_dict["value"])
        processed.append(
            {
                "measurement": measurement,
                "tags": tag_dict,
                "fields": field_dict,
            }
        )
    log_process(f"Writing {len(processed)} data points")
    try:
        result = WRITE_API.write(target_bucket, SETTINGS["influxdb"]["org"], processed)
        if result is not None:
            log_process(f"Wrote {len(processed)} data points")
    except Exception as e:
        # report the failure to the error log and to the game
        log_error(f"Error writing to influxdb: {e}")
        return [1, f"Error writing to influxdb: {e}"]
    return [0, f"Wrote {len(processed)} data points successfully"]
has_call_finished # noqa imported functions
get_call_value # noqa imported functions

View File

@@ -0,0 +1 @@
influxdb-client
pyproj

View File

@@ -0,0 +1,80 @@
import sys
import threading
# https://stackoverflow.com/a/65447493/6543759
class ThreadWithResult(threading.Thread):
    """A Thread that captures its target's return value in `self.result`
    and re-raises any exception from the target when join() is called."""

    def __init__(
        self, group=None, target=None, name=None, args=(), kwargs=None, *, daemon=None
    ):
        self.exc = None
        if not kwargs:
            kwargs = {}

        def function():
            # run the real target, stashing the result / exception on self
            self.exc = None
            try:
                self.result = target(*args, **kwargs)
            except:  # noqa
                # Save details of the exception thrown but don't rethrow,
                # just complete the function
                self.exc = sys.exc_info()

        super().__init__(group=group, target=function, name=name, daemon=daemon)

    # https://stackoverflow.com/a/12223550/6543759
    def join(self, *args, **kwargs):
        """Wait for the thread; re-raise the target's exception, if any."""
        super().join(*args, **kwargs)
        if self.exc:
            # Thread.getName() is deprecated since Python 3.10 -- use .name
            msg = "Thread '%s' threw an exception: %s" % (self.name, self.exc[1])
            new_exc = Exception(msg)
            raise new_exc.with_traceback(self.exc[2])
# Registry of in-flight worker threads, keyed by a monotonically
# increasing integer id handed back to the game.
THREADS = {}
THREAD_ID = 0


def call_slow_function(function, args):
    """Start `function(*args)` on a daemon ThreadWithResult and return the
    integer id under which the thread is registered in THREADS."""
    global THREAD_ID
    THREAD_ID += 1
    worker = ThreadWithResult(target=function, args=args, daemon=True)
    THREADS[THREAD_ID] = worker
    worker.start()
    return THREAD_ID
def has_call_finished(thread_id):
    """Return True once the thread registered under thread_id has finished.

    Its value can then be fetched with get_call_value(). Raises KeyError for
    an unknown thread_id (e.g. one that was already collected).
    """
    return not THREADS[thread_id].is_alive()
def get_call_value(thread_id):
    """Collect the result of a finished worker thread and deregister it.

    Raises ValueError if the thread is still running, KeyError for an unknown
    thread_id, and RuntimeError if the thread finished without producing a
    result (its target raised).
    """
    global THREADS
    thread = THREADS[thread_id]
    if thread.is_alive():
        # Thread is still working
        raise ValueError("Thread is still running!")
    # Thread has finished, we can return its value now
    try:
        # join() re-raises any exception captured by ThreadWithResult
        thread.join()
    finally:
        # deregister even if join() raised, so THREADS cannot leak entries
        del THREADS[thread_id]
    try:
        return thread.result
    except AttributeError:
        raise RuntimeError(
            'The thread does not have the "result" attribute. An unhandled error occurred inside your Thread'
        )

View File

@@ -0,0 +1,6 @@
{
    "influxdb": {
        "host": "http://INFLUX_URL:8086",
        "token": "XXXXXXXXXXXXXXXXXXXXXXXXXXXX_AUTH_TOKEN_XXXXXXXXXXXXXXXXXXXXXXXXXXX",
        "org": "ORG_NAME",
        "defaultBucket": "BUCKET_NAME"
    },
    "arma3": {
        "refreshRateMs": 10000
    }
}