working. refining schema, fix double quotes in tag values

This commit is contained in:
2023-04-09 19:02:57 -07:00
parent 4d8d8e44f5
commit 98f5339fef
32 changed files with 900 additions and 399 deletions

View File

@@ -10,6 +10,34 @@ class CfgPatches {
}; };
class CfgFunctions { class CfgFunctions {
class RangerMetrics_cDefinitions {
class functions {
file = "\RangerMetrics\functions\captureDefinitions";
class server_poll {};
class server_missionEH {};
class client_poll {};
// class clientEvent {};
};
};
class RangerMetrics_capture {
// these names represent measurement names sent to InfluxDB - snake case
class functions {
file = "\RangerMetrics\functions\capture";
class chat_message {};
class entities_global {};
class entities_local {};
class mission_config_file {};
class player_identity {};
class player_loadout {};
class player_performance {};
class player_status {};
class running_mission {};
class running_scripts {};
class server_performance {};
class server_time {};
class weather {};
};
};
class RangerMetrics { class RangerMetrics {
class core { class core {
file = "\RangerMetrics\functions\core"; file = "\RangerMetrics\functions\core";
@@ -18,11 +46,14 @@ class CfgFunctions {
class log {}; class log {};
class queue {}; class queue {};
class send {}; class send {};
class callbackHandler {};
class sendClientPoll {};
class startServerPoll {};
}; };
class eventHandlers { class eventHandlers {
file = "\RangerMetrics\functions\eventHandlers"; file = "\RangerMetrics\functions\eventHandlers";
class addHandlers {}; class addHandlers {};
class callbackHandler {};
}; };
class helpers { class helpers {
file = "\RangerMetrics\functions\helpers"; file = "\RangerMetrics\functions\helpers";
@@ -31,19 +62,5 @@ class CfgFunctions {
class stringReplace {}; class stringReplace {};
class unixTimestamp {}; class unixTimestamp {};
}; };
class measurements {
file = "\RangerMetrics\functions\measurements";
class chat_message {};
class entities_global {};
class entities_local {};
class mission_config_file {};
class player_identity {};
class player_performance {};
class player_status {};
class running_mission {};
class running_scripts {};
class server_performance {};
class server_time {};
};
}; };
}; };

View File

@@ -0,0 +1,40 @@
// Capture a HandleChatMessage event as a "server_events" measurement.
// Parameters mirror the engine's HandleChatMessage mission event handler.
params ["_channel", "_owner", "_from", "_text", "_person", "_name", "_strID", "_forcedDisplay", "_isPlayerMessage", "_sentenceType", "_chatMessageType"];
private _fields = [
    ["int", "channel", _channel],
    ["int", "owner", _owner],
    ["string", "from", _from],
    ["string", "text", _text],
    // ["object", "person", _person],
    ["string", "name", _name],
    ["string", "strID", _strID],
    ["bool", "forcedDisplay", _forcedDisplay],
    ["bool", "isPlayerMessage", _isPlayerMessage],
    ["int", "sentenceType", _sentenceType],
    ["int", "chatMessageType", _chatMessageType]
];
// we need special processing to ensure the object is valid and we have a playerUid. Line protocol doesn't support empty string
// FIX: the original condition `if !(objNull isEqualType _person)` was inverted —
// it only called getPlayerUID when _person was NOT an object, which both skipped
// valid units and risked a type error. Only query the UID for a real, non-null object.
private _playerUid = "";
if (!isNil "_person" && {_person isEqualType objNull} && {!isNull _person}) then {
    _playerUid = getPlayerUID _person;
};
if (_playerUid isNotEqualTo "") then {
    _fields pushBack ["string", "playerUid", _playerUid];
};
[
    "server_events",
    "HandleChatMessage",
    nil,
    _fields
] call RangerMetrics_fnc_queue;

View File

@@ -0,0 +1,7 @@
// Snapshot of mission-wide entity counts, queued to the server_state bucket.
private _entityFields = [
    ["int", "units_alive", count allUnits],
    ["int", "units_dead", count allDeadMen],
    ["int", "groups_total", count allGroups],
    ["int", "vehicles_total", count vehicles]
];
["server_state", "entities_global", nil, _entityFields] call RangerMetrics_fnc_queue;

View File

@@ -0,0 +1,7 @@
// Snapshot of entity counts hosted on THIS machine (locality check per entity),
// queued to the server_state bucket.
private _isLocal = { local _x };
["server_state", "entities_local", nil, [
    ["int", "units_alive", _isLocal count allUnits],
    ["int", "units_dead", _isLocal count allDeadMen],
    ["int", "groups_total", _isLocal count allGroups],
    ["int", "vehicles_total", _isLocal count vehicles]
]] call RangerMetrics_fnc_queue;

View File

@@ -0,0 +1,210 @@
// Extract a curated set of missionConfigFile properties, group them into
// custom categories, and queue one "mission_config_file" measurement per
// category (bucket: config_state, tag: category).
// get basic config properties
private _properties = [
    ["mission_info", [
        "author",
        "onLoadName",
        "onLoadMission",
        "loadScreen",
        // "header",
        "gameType",
        "minPlayers",
        "maxPlayers",
        "onLoadIntro",
        "onLoadMissionTime",
        "onLoadIntroTime",
        "briefingName",
        "overviewPicture",
        "overviewText",
        "overviewTextLocked"
    ]],
    ["respawn", [
        "respawn",
        "respawnButton",
        "respawnDelay",
        "respawnVehicleDelay",
        "respawnDialog",
        "respawnOnStart",
        "respawnTemplates",
        "respawnTemplatesWest",
        "respawnTemplatesEast",
        "respawnTemplatesGuer",
        "respawnTemplatesCiv",
        "respawnWeapons",
        "respawnMagazines",
        "reviveMode",
        "reviveUnconsciousStateMode",
        "reviveRequiredTrait",
        "reviveRequiredItems",
        "reviveRequiredItemsFakConsumed",
        "reviveMedicSpeedMultiplier",
        "reviveDelay",
        "reviveForceRespawnDelay",
        "reviveBleedOutDelay",
        "enablePlayerAddRespawn"
    ]],
    ["player_ui", [
        "overrideFeedback",
        "showHUD",
        "showCompass",
        "showGPS",
        "showGroupIndicator",
        "showMap",
        "showNotePad",
        "showPad",
        "showWatch",
        "showUAVFeed",
        "showSquadRadar"
    ]],
    ["corpse_and_wreck", [
        "corpseManagerMode",
        "corpseLimit",
        "corpseRemovalMinTime",
        "corpseRemovalMaxTime",
        "wreckManagerMode",
        "wreckLimit",
        "wreckRemovalMinTime",
        "wreckRemovalMaxTime",
        "minPlayerDistance"
    ]],
    ["mission_settings", [
        "aiKills",
        "briefing",
        "debriefing",
        "disableChannels",
        "disabledAI",
        "disableRandomization",
        "enableDebugConsole",
        "enableItemsDropping",
        "enableTeamSwitch",
        "forceRotorLibSimulation",
        "joinUnassigned",
        "minScore",
        "avgScore",
        "maxScore",
        "onCheat",
        "onPauseScript",
        "saving",
        "scriptedPlayer",
        "skipLobby",
        "HostDoesNotSkipLobby",
        "missionGroup"
    ]
]
];
private _propertyValues = createHashMap;
// iterate through list of categories with desired property names
// if the property exists in the extracted missionConfigFile property hash, save it with the category into _propertyValues
{
    private _category = _x#0;
    private _values = _x#1;
    {
        private _property = _x;
        private _value = (missionConfigFile >> _property) call BIS_fnc_getCfgData;
        // hint str [_category, _property, _value];
        if (!isNil "_value") then {
            if (typeName _value == "ARRAY") then {
                _value = _value joinString ",";
            };
            // lazily create the per-category hashmap on first property hit
            if (isNil {_propertyValues get _category}) then {
                _propertyValues set [_category, createHashMap];
            };
            _propertyValues get _category set [_property, _value];
        };
    } forEach _values;
} forEach _properties;
// Take the generated hashmap of custom-categorized configuration properties and queue them for metrics
{
    private _measurementCategory = _x;
    private _fields = _y;
    private _fieldsWithType = [];
    // InfluxDB type lookup hash, keyed by SQF typeName
    // FIX: declared private so it cannot clobber a same-named variable in an outer scope
    private _types = createHashMapFromArray [
        ["STRING", "string"],
        ["ARRAY", "string"],
        ["SCALAR", "float"],
        ["BOOL", "bool"]
    ];
    // Preprocess the fields to clean the raw data
    {
        private _fieldName = _x;
        private _fieldValue = _y;
        private _fieldType = _types get (typeName _fieldValue);
        // turn ARRAY into string since Influx can't take them
        if (typeName _fieldValue == "ARRAY") then {
            _fieldValue = _fieldValue joinString "|";
        };
        // convert 0 or 1 (from config) to BOOL
        if (typeName _fieldValue == "SCALAR" && _fieldValue in [0, 1]) then {
            _fieldType = "bool";
            if (_fieldValue == 0) then {
                _fieldValue = "false";
            } else {
                _fieldValue = "true";
            };
        };
        _fieldsWithType pushBack [_fieldType, _fieldName, _fieldValue];
    } forEach _fields;
    // finally, send the data
    // FIX: tags must be [type, key, value] triples for RangerMetrics_fnc_toLineProtocol;
    // the original two-element ["category", value] form left the value slot nil,
    // so the category tag was silently dropped.
    [
        "config_state",
        "mission_config_file",
        [
            ["string", "category", _measurementCategory]
        ],
        _fieldsWithType
    ] call RangerMetrics_fnc_queue;
} forEach _propertyValues;
// get all properties in missionConfigFile (recursive)
// private _nextCfgClasses = "true" configClasses (missionConfigFile);
// private _nextCfgProperties = configProperties [missionConfigFile];
// private _cfgProperties = createHashMap;
// while {count _nextCfgClasses > 0} do {
//     {
//         private _thisConfig = _x;
//         private _thisConfigClasses = "true" configClasses _thisConfig;
//         _thisCfgProperties = configProperties [_thisConfig, "!isClass _x"];
//         _saveHash = createHashMap;
//         {
//             _propertyCfg = _x;
//             _saveHash set [configName _propertyCfg, (_propertyCfg) call BIS_fnc_getCfgData];
//         } forEach _thisCfgProperties;
//         _hierarchy = (configHierarchy _thisConfig);
//         _hierarchy deleteAt 0;
//         _hierarchy = _hierarchy apply {configName _x};
//         _hierarchyStr = _hierarchy joinString ".";
//         _hierarchyStrParent = (_hierarchy select [0, count _hierarchy - 2]) joinString ".";
//         systemChat _hierarchyStrParent;
//         // if (_cfgProperties get _hierarchyStrParent == nil) then {
//         //     _cfgProperties set [_hierarchyStrParent, createHashMap];
//         // };
//         _cfgProperties set [_hierarchyStr, _saveHash];
//         // _cfgProperties set [_hierarchy, _saveHash];
//         _nextCfgClasses append _thisConfigClasses;
//     } forEach _nextCfgClasses;
//     _nextCfgClasses = _nextCfgClasses - _cfgClasses;
// };
// text ([_cfgProperties] call RangerMetrics_fnc_encodeJSON);
// iterate through _cfgProperties hashmap and queue metrics
// {
// } forEach _cfgProperties;

View File

@@ -0,0 +1,30 @@
// Capture a player's identity details into the "player_state" bucket.
// Input is the getUserInfo array for one user, optionally extended with a JIP flag.
params ["_playerID", "_ownerId", "_playerUID", "_profileName", "_displayName", "_steamName", "_clientState", "_isHC", "_adminState", "_networkInfo", "_unit", ["_jip", false]];
// _networkInfo params ["_avgPing", "_avgBandwidth", "_desync"];
private _fields = [
    ["string", "playerID", _playerID],
    ["string", "ownerId", _ownerId],
    ["string", "playerUID", _playerUID],
    ["string", "profileName", _profileName],
    ["string", "displayName", _displayName],
    ["string", "steamName", _steamName],
    ["bool", "isHC", _isHC],
    ["bool", "isJip", _jip]
];
// roleDescription is only available while the player's unit object exists
if (!isNil "_unit") then {
    private _roleDescription = roleDescription _unit;
    if (_roleDescription isNotEqualTo "") then {
        _fields pushBack ["string", "roleDescription", _roleDescription];
    };
};
[
    "player_state",
    "player_identity",
    [
        // FIX: tag with the captured user's UID, not `getPlayerUID player` —
        // `player` is objNull on a dedicated server and would tag every record
        // with an empty UID. This also matches player_performance/player_status.
        ["string", "playerUID", _playerUID]
    ],
    _fields,
    nil
] call RangerMetrics_fnc_queue;

View File

@@ -0,0 +1,44 @@
// loadout data, captured clientside
// Polled on each client: queues the local player's loadout to the server only
// when it differs from the last captured snapshot (cached on the player object).
if (isNull player) exitWith {};
// _handleName is supplied by the poll framework; not used by this capture
params ["_handleName"];
private _lastLoadout = player getVariable "RangerMetrics_myLoadout";
if (isNil "_lastLoadout") then {
    _lastLoadout = [];
};
// each row is [influxType, fieldName, value] as expected by RangerMetrics_fnc_queue
private _currentLoadout = [
    ["string", "currentWeapon", currentWeapon player],
    ["string", "uniform", uniform player],
    ["string", "vest", vest player],
    ["string", "backpack", backpack player],
    ["string", "headgear", headgear player],
    ["string", "goggles", goggles player],
    ["string", "hmd", hmd player],
    ["string", "primaryWeapon", primaryWeapon player],
    ["string", "primaryWeaponMagazine", primaryWeaponMagazine player],
    ["string", "secondaryWeapon", secondaryWeapon player],
    ["string", "secondaryWeaponMagazine", secondaryWeaponMagazine player],
    ["string", "handgunWeapon", handgunWeapon player],
    ["string", "handgunMagazine", handgunMagazine player]
];
// exit if loadout hasn't changed
if (_lastLoadout isEqualTo _currentLoadout) exitWith {};
// continue if loadout has changed
// store loadout data locally
player setVariable ["RangerMetrics_myLoadout", _currentLoadout];
// send loadout data to server (machine id 2) for queueing
[
    "player_state", // bucket to store the data
    "player_loadout", // measurement classifier inside of bucket
    [ // tags
        ["string", "playerUID", getPlayerUID player]
    ],
    _currentLoadout, // fields
    nil
] remoteExec ["RangerMetrics_fnc_queue", 2];

View File

@@ -0,0 +1,17 @@
// Network performance (ping / bandwidth / desync) for every connected user,
// one "player_performance" record per user, tagged by their UID.
private _userInfos = allUsers apply {getUserInfo _x};
{
    _x params ["_playerID", "_ownerId", "_playerUID", "_profileName", "_displayName", "_steamName", "_clientState", "_isHC", "_adminState", "_networkInfo", "_unit"];
    _networkInfo params ["_avgPing", "_avgBandwidth", "_desync"];
    private _perfFields = [
        ["float", "avgPing", _avgPing],
        ["float", "avgBandwidth", _avgBandwidth],
        ["float", "desync", _desync]
    ];
    [
        "player_state",
        "player_performance",
        [["string", "playerUID", _playerUID]],
        _perfFields,
        ["server"]
    ] call RangerMetrics_fnc_queue;
} forEach _userInfos;

View File

@@ -0,0 +1,12 @@
// Capture a single player's connection/admin state into the "player_state" bucket.
// Input is the getUserInfo array for one user.
params ["_playerID", "_ownerId", "_playerUID", "_profileName", "_displayName", "_steamName", "_clientState", "_isHC", "_adminState", "_networkInfo", "_unit"];
// _networkInfo params ["_avgPing", "_avgBandwidth", "_desync"];
// NOTE(review): _clientState is written as an int field — presumably this slot
// holds getUserInfo's numeric client state; confirm against the engine docs
["player_state", "player_status",
    [["string", "playerUID", _playerUID]],
    [
        ["int", "clientStateNumber", _clientState],
        ["int", "adminState", _adminState]
    ],
    nil
] call RangerMetrics_fnc_queue;

View File

@@ -0,0 +1,16 @@
// Queue identifying information about the currently running mission.
private _onLoadName = getMissionConfigValue ["onLoadName", ""];
private _missionFields = [
    ["string", "onLoadName", _onLoadName],
    ["string", "briefingName", briefingName],
    ["string", "missionName", missionName],
    ["string", "missionNameSource", missionNameSource]
];
[
    "server_state", // bucket to store the data
    "running_mission", // measurement classifier inside of bucket
    nil, // tags
    _missionFields // fields
] call RangerMetrics_fnc_queue;

View File

@@ -0,0 +1,7 @@
// Scheduler snapshot: active script counts per execution mechanism.
// diag_activeScripts returns [spawn, execVM, exec, execFSM].
diag_activeScripts params ["_spawnCount", "_execVMCount", "_execCount", "_execFSMCount"];
// per-frame-handler count is only available when CBA is loaded
private _pfhCount = 0;
if (RangerMetrics_cbaPresent) then {
    _pfhCount = count CBA_common_perFrameHandlerArray;
};
["server_state", "running_scripts", nil, [
    ["int", "spawn", _spawnCount],
    ["int", "execVM", _execVMCount],
    ["int", "exec", _execCount],
    ["int", "execFSM", _execFSMCount],
    ["int", "pfh", _pfhCount]
]] call RangerMetrics_fnc_queue;

View File

@@ -0,0 +1,4 @@
// Server FPS metrics; toFixed trims the values to 2 decimals for line protocol.
private _avgFps = diag_fps toFixed 2;
private _minFps = diag_fpsMin toFixed 2;
["server_state", "server_performance", nil, [
    ["float", "avg", _avgFps],
    ["float", "min", _minFps]
]] call RangerMetrics_fnc_queue;

View File

@@ -0,0 +1,6 @@
// Engine clock metrics; toFixed trims the values to 2 decimals for line protocol.
private _timeFields = [
    ["float", "diag_tickTime", diag_tickTime toFixed 2],
    ["float", "serverTime", time toFixed 2],
    ["float", "timeMultiplier", timeMultiplier toFixed 2],
    ["float", "accTime", accTime toFixed 2]
];
["server_state", "server_time", nil, _timeFields] call RangerMetrics_fnc_queue;

View File

@@ -0,0 +1,19 @@
// Queue current environment/weather readings as one "weather" measurement.
[
    "server_state", // bucket to store the data
    "weather", // measurement classifier inside of bucket
    nil, // tags
    [ // fields
        ["float", "fog", fog],
        ["float", "overcast", overcast],
        ["float", "rain", rain],
        ["float", "humidity", humidity],
        ["float", "waves", waves],
        ["float", "windDir", windDir],
        ["float", "windStr", windStr],
        ["float", "gusts", gusts],
        ["float", "lightnings", lightnings],
        ["float", "moonIntensity", moonIntensity],
        // NOTE(review): moonPhase is documented for [year, month, day];
        // `date` also carries hour/minute — confirm the engine accepts the extra elements
        ["float", "moonPhase", moonPhase date],
        ["float", "sunOrMoon", sunOrMoon]
    ]
] call RangerMetrics_fnc_queue;

View File

@@ -0,0 +1,11 @@
// ClientPoll capture definition.
// Shape: [ [intervalSeconds, [ [handleName, code], ... ]], ... ] — consumed by
// RangerMetrics_fnc_sendClientPoll, which registers each entry on clients.
[
    [
        5, // Poll interval in seconds
        [ // Array of things to poll on clients
            [
                "RangerMetrics_poll_loadout", // Name of localNamespace variable to save the handler as on clients
                RangerMetrics_capture_fnc_player_loadout // Function to call
            ]
        ]
    ]
]

View File

@@ -0,0 +1,81 @@
// ServerEvent capture definitions: [eventName, handlerCode] pairs that get
// registered as mission event handlers on the server. Each handler queues a
// "server_events" measurement via RangerMetrics_fnc_queue.
[
    ["MPEnded", {
        // NOTE(review): MPEnded passes no arguments, so winner/reason are
        // hard-coded placeholders until a real data source exists
        private ["_winner", "_reason"];
        _winner = "Unknown";
        _reason = "Mission Complete";
        ["server_events", "MPEnded", nil, [
            ["string", "winner", _winner],
            ["string", "reason", _reason]
        ]] call RangerMetrics_fnc_queue;
        // re-capture mission identity so the end state is recorded
        call RangerMetrics_capture_fnc_running_mission;
    }],
    ["OnUserConnected", {
        params ["_networkId", "_clientStateNumber", "_clientState"];
        // refresh identity + status snapshots for the connecting user
        (getUserInfo _networkId) call RangerMetrics_capture_fnc_player_identity;
        (getUserInfo _networkId) call RangerMetrics_capture_fnc_player_status;
        ["server_events", "UserConnected", nil, [
            ["string", "networkId", _networkId],
            ["int", "clientStateNumber", _clientStateNumber],
            ["string", "clientState", _clientState]
        ]] call RangerMetrics_fnc_queue;
    }],
    ["OnUserDisconnected", {
        params ["_networkId", "_clientStateNumber", "_clientState"];
        (getUserInfo _networkId) call RangerMetrics_capture_fnc_player_identity;
        (getUserInfo _networkId) call RangerMetrics_capture_fnc_player_status;
        ["server_events", "OnUserDisconnected", nil, [
            ["string", "networkId", _networkId],
            ["int", "clientStateNumber", _clientStateNumber],
            ["string", "clientState", _clientState]
        ]] call RangerMetrics_fnc_queue;
    }],
    ["PlayerConnected", {
        params ["_id", "_uid", "_name", "_jip", "_owner", "_idstr"];
        // _idstr is the network id string accepted by getUserInfo
        (getUserInfo _idstr) call RangerMetrics_capture_fnc_player_identity;
        (getUserInfo _idstr) call RangerMetrics_capture_fnc_player_status;
        ["server_events", "PlayerConnected", nil, [
            ["int", "id", _id],
            ["string", "uid", _uid],
            ["string", "name", _name],
            ["bool", "jip", _jip],
            ["int", "owner", _owner],
            ["string", "idstr", _idstr]
        ]] call RangerMetrics_fnc_queue;
    }],
    ["PlayerDisconnected", {
        params ["_id", "_uid", "_name", "_jip", "_owner", "_idstr"];
        (getUserInfo _idstr) call RangerMetrics_capture_fnc_player_identity;
        (getUserInfo _idstr) call RangerMetrics_capture_fnc_player_status;
        ["server_events", "PlayerDisconnected", nil, [
            ["int", "id", _id],
            ["string", "uid", _uid],
            ["string", "name", _name],
            ["bool", "jip", _jip],
            ["int", "owner", _owner],
            ["string", "idstr", _idstr]
        ]] call RangerMetrics_fnc_queue;
    }],
    ["OnUserClientStateChanged", {
        params ["_networkId", "_clientStateNumber", "_clientState"];
        (getUserInfo _networkId) call RangerMetrics_capture_fnc_player_status;
        ["server_events", "OnUserClientStateChanged", nil, [
            ["string", "networkId", _networkId],
            ["int", "clientStateNumber", _clientStateNumber],
            ["string", "clientState", _clientState]
        ]] call RangerMetrics_fnc_queue;
    }],
    ["OnUserAdminStateChanged", {
        params ["_networkId", "_loggedIn", "_votedIn"];
        (getUserInfo _networkId) call RangerMetrics_capture_fnc_player_status;
        ["server_events", "OnUserAdminStateChanged", nil, [
            ["string", "networkId", _networkId],
            ["bool", "loggedIn", _loggedIn],
            ["bool", "votedIn", _votedIn]
        ]] call RangerMetrics_fnc_queue;
    }],
    ["HandleChatMessage", {
        _this call RangerMetrics_capture_fnc_chat_message;
        // don't interfere with the chat message
        false;
    }]
]

View File

@@ -0,0 +1,19 @@
// ServerPoll capture definitions.
// Shape: [ [intervalSeconds, [captureFunction, ...]], ... ] — each group is
// handed to RangerMetrics_fnc_startServerPoll to run on its own cadence.
[
    [
        1, // interval
        [ // functions to run
            RangerMetrics_capture_fnc_server_performance,
            RangerMetrics_capture_fnc_running_scripts,
            RangerMetrics_capture_fnc_server_time,
            RangerMetrics_capture_fnc_entities_local,
            RangerMetrics_capture_fnc_entities_global,
            RangerMetrics_capture_fnc_player_performance
        ]
    ],
    [
        60, // slow-changing data polled once a minute
        [
            RangerMetrics_capture_fnc_weather
        ]
    ]
]

View File

@@ -0,0 +1,14 @@
// Handle ExtensionCallback events addressed to this addon.
// _name: extension name, _function: callback function id, _data: payload string.
// Extension payloads arrive as stringified arrays and are forwarded to the logger.
params ["_name", "_function", "_data"];
if (_name == "RangerMetrics") then {
    if (isNil "_data") then {_data = ""};
    try {
        if (_data isEqualType "") exitWith {
            _data = parseSimpleArray _data;
            _data call RangerMetrics_fnc_log;
        };
        diag_log format ["Callback unsupported type: %1: %2", _function, _data];
    } catch {
        // FIX: the original formatted the failure into _data and discarded it,
        // silently swallowing parse errors — log them instead
        diag_log format ["RangerMetrics callback error in %1: %2 (data: %3)", _function, _exception, _data];
    };
};

View File

@@ -1,21 +1,18 @@
// function adapted from YAINA by MartinCo at http://yaina.eu if (!RangerMetrics_run) exitWith {};
if (
missionNamespace getVariable ["RangerMetrics_run",false]
) then {
private _startTime = diag_tickTime; private _startTime = diag_tickTime;
call RangerMetrics_fnc_server_performance; call RangerMetrics_capture_fnc_server_performance;
call RangerMetrics_fnc_running_scripts; call RangerMetrics_capture_fnc_running_scripts;
call RangerMetrics_fnc_server_time; call RangerMetrics_capture_fnc_server_time;
call RangerMetrics_capture_fnc_weather;
call RangerMetrics_fnc_entities_local; call RangerMetrics_capture_fnc_entities_local;
call RangerMetrics_fnc_entities_global; call RangerMetrics_capture_fnc_entities_global;
private _allUsers = allUsers apply {getUserInfo _x}; private _allUsers = allUsers apply {getUserInfo _x};
{ {
_x call RangerMetrics_fnc_player_performance; _x call RangerMetrics_capture_fnc_player_performance;
_x call RangerMetrics_fnc_player_status;
} forEach _allUsers; } forEach _allUsers;
// log the runtime and switch off debug so it doesn't flood the log // log the runtime and switch off debug so it doesn't flood the log
@@ -25,4 +22,3 @@ if (
[format ["Run time: %1", diag_tickTime - _startTime], "DEBUG"] call RangerMetrics_fnc_log; [format ["Run time: %1", diag_tickTime - _startTime], "DEBUG"] call RangerMetrics_fnc_log;
// missionNamespace setVariable ["RangerMetrics_debug",false]; // missionNamespace setVariable ["RangerMetrics_debug",false];
}; };
};

View File

@@ -10,7 +10,6 @@ RangerMetrics_run = false;
RangerMetrics_activeThreads = []; RangerMetrics_activeThreads = [];
RangerMetrics_messageQueue = createHashMap; RangerMetrics_messageQueue = createHashMap;
RangerMetrics_sendBatchHandle = scriptNull; RangerMetrics_sendBatchHandle = scriptNull;
RangerMetrics_captureBatchHandle = scriptNull;
[format ["Instance name: %1", profileName]] call RangerMetrics_fnc_log; [format ["Instance name: %1", profileName]] call RangerMetrics_fnc_log;
[format ["CBA detected: %1", RangerMetrics_cbaPresent]] call RangerMetrics_fnc_log; [format ["CBA detected: %1", RangerMetrics_cbaPresent]] call RangerMetrics_fnc_log;
@@ -64,58 +63,71 @@ addMissionEventHandler ["ExtensionCallback", {
_this call RangerMetrics_fnc_callbackHandler; _this call RangerMetrics_fnc_callbackHandler;
}]; }];
RangerMetrics_initialized = true;
RangerMetrics_run = true; // define the metrics to capture by sideloading definition files
// this keeps the main file clean and easy to read
// the definition files are in the format of a hashmap, where the key is the category and the value is an array of arrays, where each sub-array is a capture definition
RangerMetrics_captureDefinitions = createHashMapFromArray [
[
"ServerEvent",
createHashMapFromArray [
[
"MissionEventHandlers",
call RangerMetrics_cDefinitions_fnc_server_missionEH
]
]],
["ClientEvent", []],
[
"ServerPoll",
call RangerMetrics_cDefinitions_fnc_server_poll
],
[
"ClientPoll",
call RangerMetrics_cDefinitions_fnc_client_poll
]
];
call RangerMetrics_fnc_addHandlers; // add missionEventHandlers on server
{_x params ["_handleName", "_code"];
missionNamespace setVariable [
("RangerMetrics" + "_MEH_" + _handleName),
(addMissionEventHandler [_handleName, _code])
];
} forEach ((RangerMetrics_captureDefinitions get "ServerEvent") get "MissionEventHandlers");
// begin server polling
{
_x call RangerMetrics_fnc_startServerPoll;
} forEach (RangerMetrics_captureDefinitions get "ServerPoll");
// remoteExec client polling - send data to start handles
{
_x call RangerMetrics_fnc_sendClientPoll;
} forEach (RangerMetrics_captureDefinitions get "ClientPoll");
// {
// } forEach (call RangerMetrics_captureDefinitions_fnc_clientEvent);
// begin client polling
if (RangerMetrics_cbaPresent) then { // CBA is running, use PFH
/* // start sending
This capture method is dynamic.
Every 5 seconds, two script handles are checked. One is for capturing, one is for sending.
The capturing script will go through and capture data, getting nanosecond precision timestamps from the extension to go alongside each data point, then saving it to a queue. It will go through all assigned interval-based checks then exit, and on the next interval of this parent PFH, the capturing script will be spawned again.
The queue is a hashmap where keys are buckets and values are arrays of data points in [string] line protocol format.
The sending script will go through and send data, sending it in batches per bucket and per 2000 data points, as the max extension call with args is 2048 elements.
The sending script will also check if the queue is empty, and if it is, it will exit. This means scriptDone will be true, and on the next interval of this parent PFH, the sending script will be spawned again.
This system means that capture and sending are occurring in the scheduled environment, not blocking the server, while maintaining the timestamps of when each point was captured. The cycles of each will only occur at most once per 5 seconds, leaving plenty of time, and there will never be more than one call for each at a time.
*/
[{ [{
params ["_args", "_idPFH"]; params ["_args", "_idPFH"];
if (scriptDone RangerMetrics_captureBatchHandle) then {
RangerMetrics_captureBatchHandle = [] spawn RangerMetrics_fnc_captureLoop;
};
if (scriptDone RangerMetrics_sendBatchHandle) then { if (scriptDone RangerMetrics_sendBatchHandle) then {
RangerMetrics_sendBatchHandle = [] spawn RangerMetrics_fnc_send; RangerMetrics_sendBatchHandle = [] spawn RangerMetrics_fnc_send;
}; };
}, 5, []] call CBA_fnc_addPerFrameHandler; }, 2, []] call CBA_fnc_addPerFrameHandler;
RangerMetrics_initialized = true;
RangerMetrics_run = true;
call RangerMetrics_capture_fnc_running_mission;
// runs on interval
// [{
// params ["_args", "_idPFH"];
// RangerMetrics_unixTime = (parseSimpleArray ("RangerMetrics" callExtension "getUnixTimeNano")) select 0;
// // spawn RangerMetrics_fnc_captureLoop;
// // call RangerMetrics_fnc_send;
// }, 3, []] call CBA_fnc_addPerFrameHandler;
} else { // CBA isn't running, use sleep
[] spawn {
while {true} do {
RangerMetrics_unixTime = (parseSimpleArray ("RangerMetrics" callExtension "getUnixTimeNano")) select 0;
call RangerMetrics_fnc_captureLoop; // nested to match CBA PFH signature
sleep 1;
if (RangerMetrics_sendBatchHandle != -1) exitWith {
RangerMetrics_sendBatchHandle = [] spawn RangerMetrics_fnc_send;
};
if (scriptDone RangerMetrics_sendBatchHandle) exitWith {
RangerMetrics_sendBatchHandle = -1;
};
};
};
};

View File

@@ -2,7 +2,8 @@ params [
["_bucket", "default", [""]], ["_bucket", "default", [""]],
"_measurement", "_measurement",
["_tags", [], [[], nil]], ["_tags", [], [[], nil]],
["_fields", [], [[], nil]] ["_fields", [], [[], nil]],
["_tagContext", ["profile", "server"], [[]]]
]; ];
@@ -13,16 +14,38 @@ params [
// (_tags apply {format['%1=%2', _x#0, _x#1]}) joinString "," // (_tags apply {format['%1=%2', _x#0, _x#1]}) joinString ","
// ], // ],
_tags pushback ["string", "profileName", profileName]; if (_tagContext find "profile" > -1) then {
_tags pushBack ["string", "profileName", profileName];
};
if (_tagContext find "world" > -1) then {
_tags pushBack ["string", "world", toLower worldName];
};
if (_tagContext find "server" > -1) then {
_tags pushBack ["string", "connectedServer", RangerMetrics_serverProfileName]; _tags pushBack ["string", "connectedServer", RangerMetrics_serverProfileName];
};
private _outTags = _tags apply {
[_x, "tag"] call RangerMetrics_fnc_toLineProtocol
} select {!isNil "_x"};
// having no tags is OK
_outTags = _outTags joinString ",";
private _outFields = _fields apply {
[_x, "field"] call RangerMetrics_fnc_toLineProtocol
} select {!isNil "_x"};
// having no fields will cause an error
if (count _outFields isEqualTo 0) exitWith {};
_outFields = _outFields joinString ",";
private _extSend = format [ private _extSend = format [
"%1,%2 %3 %4", "%1,%2 %3 %4",
_measurement, // metric name _measurement, // metric name
(_tags apply {_x call RangerMetrics_fnc_toLineProtocol}) joinString ",", _outTags,
(_fields apply {_x call RangerMetrics_fnc_toLineProtocol}) joinString ",", _outFields,
call RangerMetrics_fnc_unixTimestamp call RangerMetrics_fnc_unixTimestamp
]; ];

View File

@@ -31,6 +31,25 @@ if (
missionNamespace getVariable ["RangerMetrics_debug",false] missionNamespace getVariable ["RangerMetrics_debug",false]
) then { ) then {
[format ["Bucket: %1, RecordsCount: %2", _bucket, count _processing], "DEBUG"] call RangerMetrics_fnc_log; [format ["Bucket: %1, RecordsCount: %2", _bucket, count _processing], "DEBUG"] call RangerMetrics_fnc_log;
// get unique measurement IDs
private _measurements = [];
{
_thisMeasurement = _x splitString "," select 0;
_measurements pushBack _thisMeasurement;
} forEach _processing;
// get counts of each measurement
private _measurementCounts = [];
{
private _measurement = _x;
_measurementCounts pushBack [
_measurement,
count (_measurements select {_x == _measurement})
];
} forEach _measurements;
[format ["Measurements: %1", _measurementCounts], "DEBUG"] call RangerMetrics_fnc_log;
}; };
"RangerMetrics" callExtension ["sendToInflux", flatten [_bucket, _processing]]; "RangerMetrics" callExtension ["sendToInflux", flatten [_bucket, _processing]];

View File

@@ -0,0 +1,36 @@
// format [interval, [[handleName, code], [handleName, code], ...]]
// Broadcast a poll group to clients: each machine registers a repeating handler
// (CBA PFH when available, otherwise a scheduled sleep loop) and stores its
// handle in localNamespace under _handleName.
[_this, {
    // run only on machines with a player interface or the dedicated server
    if !(hasInterface || isDedicated) exitWith {};
    params [
        ["_interval", 5, [5]],
        ["_pollItems", []]
    ];
    {
        _x params [
            "_handleName",
            ["_code", {}, [{}]]
        ];
        private _runningCBA = (isClass(configFile >> "CfgPatches" >> "cba_main"));
        if (_runningCBA) then {
            localNamespace setVariable [
                _handleName,
                [_code, _interval, _handleName] call CBA_fnc_addPerFrameHandler
            ];
        } else {
            localNamespace setVariable [
                _handleName,
                // FIX: _code must be passed into the spawned script explicitly —
                // SQF `spawn` does not inherit local variables from the outer
                // scope, so the original loop called an undefined _code.
                [_handleName, _interval, _code] spawn {
                    params [
                        "_handleName",
                        "_interval",
                        "_code"
                    ];
                    while {true} do {
                        [_handleName] call _code;
                        sleep _interval;
                    };
                }
            ];
        };
    } forEach _pollItems;
}] remoteExec ["call", [0, -2] select isDedicated, true];

View File

@@ -0,0 +1,60 @@
// Start a repeating server-side capture cycle.
// _interval: seconds between cycles; _functions: capture functions to call each cycle.
params [
    ["_interval", 5, [0]],
    ["_functions", [], [[]]]
];
// name used when tracking this cycle's script handle (spawn variant)
private _captureHandleName = format ["RangerMetrics_captureBatchHandle_%1", _interval];
if (RangerMetrics_cbaPresent) then { // CBA is running, use PFH
    /*
    This capture method is dynamic.
    Every 5 seconds, two script handles are checked. One is for capturing, one is for sending.
    The capturing script will go through and capture data, getting nanosecond precision timestamps from the extension to go alongside each data point, then saving it to a queue. It will go through all assigned interval-based checks then exit, and on the next interval of this parent PFH, the capturing script will be spawned again.
    The queue is a hashmap where keys are buckets and values are arrays of data points in [string] line protocol format.
    The sending script will go through and send data, sending it in batches per bucket and per 2000 data points, as the max extension call with args is 2048 elements.
    The sending script will also check if the queue is empty, and if it is, it will exit. This means scriptDone will be true, and on the next interval of this parent PFH, the sending script will be spawned again.
    This system means that capture and sending are occurring in the scheduled environment, not blocking the server, while maintaining the timestamps of when each point was captured. The cycles of each will only occur at most once per 2 seconds, leaving plenty of time, and there will never be more than one call for each at a time.
    */
    [{
        params ["_args", "_idPFH"];
        _args params ["_captureHandleName", "_functions"];
        // skip this cycle entirely while capture is switched off (PFH stays registered)
        if (!RangerMetrics_run) exitWith {};
        // use spawn
        // if (scriptDone _captureHandleName) then {
        //     missionNamespace setVariable [
        //         _captureHandleName,
        //         [_functions] spawn {
        //             {
        //                 call _x;
        //             } forEach _this;
        //         }
        //     ];
        // };
        // call direct
        [format["Running %1 functions for %2", count _functions, _captureHandleName], "DEBUG"] call RangerMetrics_fnc_log;
        {
            call _x;
        } forEach _functions;
    }, _interval, [_captureHandleName, _functions]] call CBA_fnc_addPerFrameHandler;
} else { // CBA isn't running, use sleep
    [_interval, _functions] spawn {
        params ["_interval", "_functions"];
        while {true} do {
            // NOTE: exitWith leaves the loop permanently once capture is disabled,
            // unlike the PFH branch which merely skips cycles
            if (!RangerMetrics_run) exitWith {};
            {
                call _x;
            } forEach _functions;
            sleep _interval;
        };
    };
};

View File

@@ -1,11 +1,5 @@
addMissionEventHandler ["MPEnded", { addMissionEventHandler ["MPEnded", {
private ["_winner", "_reason"];
_winner = "Unknown";
_reason = "Mission Complete";
["server_events", "MPEnded", nil, [
["string", "winner", _winner],
["string", "reason", _reason]
]] call RangerMetrics_fnc_queue;
}]; }];
addMissionEventHandler ["OnUserConnected", { addMissionEventHandler ["OnUserConnected", {
@@ -37,7 +31,7 @@ addMissionEventHandler ["PlayerConnected", {
["int", "id", _id], ["int", "id", _id],
["string", "uid", _uid], ["string", "uid", _uid],
["string", "name", _name], ["string", "name", _name],
["bool", "jip", _jip], ["bool", "isJip", _jip],
["int", "owner", _owner], ["int", "owner", _owner],
["string", "idstr", _idstr] ["string", "idstr", _idstr]
]] call RangerMetrics_fnc_queue; ]] call RangerMetrics_fnc_queue;
@@ -50,7 +44,7 @@ addMissionEventHandler ["PlayerDisconnected", {
["int", "id", _id], ["int", "id", _id],
["string", "uid", _uid], ["string", "uid", _uid],
["string", "name", _name], ["string", "name", _name],
["bool", "jip", _jip], ["bool", "isJip", _jip],
["int", "owner", _owner], ["int", "owner", _owner],
["string", "idstr", _idstr] ["string", "idstr", _idstr]
]] call RangerMetrics_fnc_queue; ]] call RangerMetrics_fnc_queue;

View File

@@ -1,17 +1,63 @@
params [ params ["_line", ["_section", "field", [""]]];
_line params [
["_valueType", "string", [""]], ["_valueType", "string", [""]],
"_key", ["_key", "", [""]],
"_value" "_value"
]; ];
// debug // debug
// diag_log format["%1=%2", _key, _value]; // diag_log format["%1=%2", _key, _value];
if (_value isEqualTo "") exitWith { if (isNil "_value") exitWith {
""; nil;
}; };
if (_valueType isEqualTo "string") exitWith { if (_value isEqualTo "") exitWith {
format['%1="%2"', _key, _value]; nil
}; };
format['%1=%2', _key, _value]; if (_value isEqualType []) then {
_value = _value joinString ",";
// replace double quotes with single quotes
_value = [_value, '""', "'"] call RangerMetrics_fnc_stringReplace;
};
if (_section isEqualTo "tag") exitWith {
switch (_valueType) do {
case "string": {
_value = [_value, ',', "\,"] call RangerMetrics_fnc_stringReplace;
_value = [_value, '=', "\="] call RangerMetrics_fnc_stringReplace;
_value = [_value, ' ', "\ "] call RangerMetrics_fnc_stringReplace;
_value = format['%1="%2"', _key, _value];
};
case "int": {
_value = format['%1=%2i', _key, _value];
};
case "bool": {
_value = format['%1=%2', _key, ['true', 'false'] select _value];
};
case "float": {
_value = format['%1=%2', _key, _value];
};
};
_value;
};
if (_section isEqualTo "field") exitWith {
switch (_valueType) do {
case "string": {
_value = [_value, '\', "\\"] call RangerMetrics_fnc_stringReplace;
_value = [_value, '"', '\"'] call RangerMetrics_fnc_stringReplace;
_value = format['%1="%2"', _key, _value];
};
case "int": {
_value = format['%1=%2i', _key, _value];
};
case "bool": {
_value = format['%1=%2', _key, ['true', 'false'] select _value];
};
case "float": {
_value = format['%1=%2', _key, _value];
};
};
_value;
};

View File

@@ -1,7 +1,19 @@
@startuml classDiagram classDiagram
class server_state { class server_state {
BUCKET BUCKET
} }
class server_events {
Measurement MPEnded
Measurement OnUserConnected
Measurement OnUserDisconnected
Measurement PlayerConnected
Measurement PlayerDisconnected
Measurement OnUserClientStateChanged
Measurement OnUserAdminStateChanged
Measurement HandleChatMessage
}
server_state --> running_mission server_state --> running_mission
class running_mission { class running_mission {
tag string profileName tag string profileName
@@ -12,8 +24,8 @@
field string briefingName field string briefingName
} }
server_state --> time server_state --> server_time
class time { class server_time {
tag string profileName tag string profileName
tag string connectedServer tag string connectedServer
field float diag_tickTime field float diag_tickTime
@@ -101,14 +113,16 @@
] ]
} }
' link fields in each category %% ' link fields in each category
mission_config_file --> mission_info mission_config_file --> mission_info
class mission_info { class mission_info {
tag string profileName
tag string connectedServer
field string author field string author
field string onLoadName field string onLoadName
field string onLoadMission field string onLoadMission
field string loadScreen field string loadScreen
' field string header %% ' field string header
field string gameType field string gameType
field int minPlayers field int minPlayers
field int maxPlayers field int maxPlayers
@@ -123,6 +137,8 @@
mission_config_file --> respawn mission_config_file --> respawn
class respawn { class respawn {
tag string profileName
tag string connectedServer
field string respawn field string respawn
field string respawnButton field string respawnButton
field string respawnDelay field string respawnDelay
@@ -150,6 +166,8 @@
mission_config_file --> player_ui mission_config_file --> player_ui
class player_ui { class player_ui {
tag string profileName
tag string connectedServer
field int overrideFeedback field int overrideFeedback
field int showHUD field int showHUD
field int showCompass field int showCompass
@@ -165,6 +183,8 @@
mission_config_file --> corpse_and_wreck mission_config_file --> corpse_and_wreck
class corpse_and_wreck { class corpse_and_wreck {
tag string profileName
tag string connectedServer
field int corpseManagerMode field int corpseManagerMode
field int corpseLimit field int corpseLimit
field int corpseRemovalMinTime field int corpseRemovalMinTime
@@ -178,6 +198,8 @@
mission_config_file --> mission_settings mission_config_file --> mission_settings
class mission_settings { class mission_settings {
tag string profileName
tag string connectedServer
field int aiKills field int aiKills
field int briefing field int briefing
field int debriefing field int debriefing
@@ -204,40 +226,47 @@
config_state --> visual_settings config_state --> visual_settings
class visual_settings { class visual_settings {
tag string profileName
tag string connectedServer
field string getTIParameters field string getTIParameters
field string objectViewDistance field string objectViewDistance
} }
class player_state { class player_state {
Measurement identity
Measurement status
} }
player_state --> player_identity player_state --> player_identity
class player_identity { class player_identity {
tag string profileName
tag string connectedServer
field string playerID field string playerID
field string ownerId field string ownerId
field string playerUID field string playerUID
field string profileName field string profileName
field string displayName field string displayName
field string steamName field string steamName
bool string isHC field bool isHC
} }
player_state --> player_status player_state --> player_status
class player_status { class player_status {
tag string profileName
tag string connectedServer
field int clientStateNumber field int clientStateNumber
field int adminState field int adminState
} }
player_state --> player_performance player_state --> player_performance
class player_performance { class player_performance {
tag string profileName
tag string connectedServer
field float avgPing field float avgPing
field float avgBandwidth field float avgBandwidth
field float desync field float desync
} }
@enduml

View File

@@ -1 +0,0 @@
RangerMetrics

View File

@@ -1,3 +0,0 @@
from . import influx
influx

View File

@@ -1,190 +0,0 @@
import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS
import threading
from pyproj import Transformer
from datetime import datetime
import json
import os
from .threading_utils import (
call_slow_function,
has_call_finished,
get_call_value,
THREADS,
THREAD_ID,
)
# get parent of parent directory (mod dir)
MOD_DIR = (
os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
.lstrip("\\")
.lstrip("?")
.lstrip("\\")
)
SETTINGS_FILE = ""
SETTINGS = None
DBCLIENT = None
WRITE_API = None
PROCESS_LOG = MOD_DIR + "\\rangermetrics_process.log"
ERROR_LOG = MOD_DIR + "\\rangermetrics_error.log"
DATA_LOG = MOD_DIR + "\\rangermetrics_data.log"
# TRANSFORMER = Transformer.from_crs("epsg:3857", "epsg:4326")
def get_dir():
    """Report this extension's directory, normalized for Arma's callExtension.

    Returns a status list: [0, label, directory, process-log path].
    """
    # Strip the Windows extended-path prefix (\\?\) that realpath can produce.
    this_path = os.path.dirname(os.path.realpath(__file__))
    for strip_chars in ("\\", "?", "\\"):
        this_path = this_path.lstrip(strip_chars)
    return [0, "Current directory", this_path, PROCESS_LOG]
def load_settings():
    """Load and validate settings.json from the mod directory.

    Returns an Arma-friendly status list:
      [1, message, ...] on failure, or
      [0, "Settings loaded", settings_out] on success, where settings_out is a
      nested [key, value] / [key, [[subkey, subvalue], ...]] list for SQF.
    """
    # check if settings.json exists in MOD_DIR
    if not os.path.isfile(os.path.join(MOD_DIR, "settings.json")):
        return [1, "settings.json not found in mod directory", MOD_DIR]
    global SETTINGS_FILE
    SETTINGS_FILE = os.path.join(MOD_DIR, "settings.json")
    # import settings from settings.json
    global SETTINGS
    try:
        with open(SETTINGS_FILE, "r") as f:
            SETTINGS = json.load(f)
    except (OSError, json.JSONDecodeError) as e:
        # A malformed or unreadable file should report back to SQF,
        # not crash the extension thread with an uncaught exception.
        return [1, f"Failed to read settings.json: {e}", SETTINGS_FILE]
    # Required [section, key] pairs that must exist before we touch InfluxDB.
    settings_validation = [
        ["influxdb", "host"],
        ["influxdb", "token"],
        ["influxdb", "org"],
        ["influxdb", "defaultBucket"],
        ["arma3", "refreshRateMs"],
    ]
    for section, key in settings_validation:
        if not (section in SETTINGS and key in SETTINGS[section]):
            return [1, f"Missing setting: {section} {key}"]
    # prep settings out to hashMap style list for A3
    # [[key, [subkey, subvalue], [subkey, subvalue]]]
    settings_out = []
    for key, value in SETTINGS.items():
        if isinstance(value, dict):
            settings_out.append([key, [[sk, sv] for sk, sv in value.items()]])
        else:
            settings_out.append([key, value])
    return [0, "Settings loaded", settings_out]
def connect_to_influx():
    """Create the module-level InfluxDB client and synchronous write API.

    Returns [0, msg] on success, [1, msg] on failure. Constructor/connection
    errors are caught explicitly; the previous bare `is None` checks were dead
    code because InfluxDBClient raises on failure rather than returning None.
    """
    global DBCLIENT
    global WRITE_API
    try:
        DBCLIENT = influxdb_client.InfluxDBClient(
            url=SETTINGS["influxdb"]["host"],
            token=SETTINGS["influxdb"]["token"],
            org=SETTINGS["influxdb"]["org"],
            enable_gzip=True,  # reduce payload size over the wire
        )
        WRITE_API = DBCLIENT.write_api(write_options=SYNCHRONOUS)
    except Exception as e:
        return [1, f"Error connecting to InfluxDB: {e}"]
    # Defensive: keep the old guard in case either factory ever returns None.
    if DBCLIENT is None or WRITE_API is None:
        return [1, "Error connecting to InfluxDB"]
    return [0, "Connected to InfluxDB"]
def test_data(data):
    """Debug helper: dump one raw payload to the data log, pretty-printed.

    `data` is [measurement, tag_set, field_set, position] where tag_set and
    field_set are [[key, value], ...] pair lists. Returns [data, tag-dict] so
    the SQF caller can inspect the pair-list -> dict conversion.
    """
    # BUGFIX: write to the mod-dir DATA_LOG constant (defined above but unused)
    # instead of a cwd-relative "influxdb_data.log", which landed wherever the
    # game process happened to be started from.
    with open(DATA_LOG, "a") as f:
        f.write(str(data) + "\n")
        f.write(f"{datetime.now()}: {data[2]}\n")
        # convert to dict from list of key, value pairs
        # format [[key, value], [key, value]] to {key: value, key: value}
        measurement, tag_set, field_set, position = data
        tag_dict = dict(tag_set)
        field_dict = dict(field_set)
        f.write(
            f"{datetime.now()}: {measurement}, {json.dumps(tag_dict, indent=2)}, "
            f"{json.dumps(field_dict, indent=2)}, {position}\n"
        )
    return [data, dict(data[1])]
def log_process(line):
    """Append a timestamped line to the process log; always returns True."""
    stamped = f"{datetime.now()}: {line}\n"
    with open(PROCESS_LOG, "a+") as log_file:
        log_file.write(stamped)
    return True
def log_error(line):
    """Append a timestamped line to the error log; always returns True."""
    stamped = f"{datetime.now()}: {line}\n"
    with open(ERROR_LOG, "a+") as log_file:
        log_file.write(stamped)
    return True
def write_influx(data):
    """Kick off an asynchronous InfluxDB write; return [thread_id] for polling."""
    # Offload the network write to a worker thread so the game never blocks.
    return [call_slow_function(write_influx_async, (data,))]
def write_influx_async(data):
    """Worker that writes one batch of points to InfluxDB (runs on a thread).

    `data` is [bucket, points]; each point is
    [measurement, value_type, tag_pairs, field_pairs] with pair lists of
    [[key, value], ...]. Returns [0, msg] on success, [1, msg] on failure.
    """
    processed = []
    # NOTE: the old unused `timestamp` local was removed; the client stamps
    # points server-side when no explicit time is given.
    # Fall back to the configured default bucket when none was supplied.
    target_bucket = data[0] or SETTINGS["influxdb"]["defaultBucket"]
    points = data[1]
    log_process(f"Writing to bucket {target_bucket}")
    # BUGFIX: previously logged len(data) (always 2) instead of the batch size.
    log_process(f"Processing {len(points)} data points")
    for point in points:
        measurement = point[0]
        value_type = point[1]
        tag_dict = dict(point[2])
        field_dict = dict(point[3])
        # Values arrive from Arma as strings; coerce to the declared type.
        if value_type == "int":
            field_dict["value"] = int(field_dict["value"])
        elif value_type == "float":
            field_dict["value"] = float(field_dict["value"])
        processed.append(
            {
                "measurement": measurement,
                "tags": tag_dict,
                "fields": field_dict,
            }
        )
    log_process(f"Writing {len(processed)} data points")
    try:
        result = WRITE_API.write(target_bucket, SETTINGS["influxdb"]["org"], processed)
        if result is not None:
            log_process(f"Wrote {len(processed)} data points")
    except Exception as e:
        # write to file
        log_error(f"Error writing to influxdb: {e}")
        return [1, f"Error writing to influxdb: {e}"]
    return [0, f"Wrote {len(processed)} data points successfully"]
# Bare references re-export the threading helpers imported above so callers of
# this module can poll threads directly; the noqa markers stop linters from
# flagging the imports as unused.
has_call_finished # noqa imported functions
get_call_value # noqa imported functions

View File

@@ -1 +0,0 @@
influxdb-client

View File

@@ -1,80 +0,0 @@
import sys
import threading
# https://stackoverflow.com/a/65447493/6543759
class ThreadWithResult(threading.Thread):
    """Thread subclass that captures the target's return value in `self.result`
    and re-raises any exception the target threw when `join()` is called.

    Based on https://stackoverflow.com/a/65447493/6543759.
    """

    def __init__(
        self, group=None, target=None, name=None, args=(), kwargs=None, *, daemon=None
    ):
        self.exc = None
        if not kwargs:
            kwargs = {}

        def function():
            self.exc = None
            try:
                # `self.result` only exists after a successful run.
                self.result = target(*args, **kwargs)
            except:  # noqa
                # Save details of the exception thrown but don't rethrow,
                # just complete the function
                self.exc = sys.exc_info()

        super().__init__(group=group, target=function, name=name, daemon=daemon)

    # https://stackoverflow.com/a/12223550/6543759
    def join(self, *args, **kwargs):
        """Wait for the thread; re-raise the target's exception, if any."""
        super().join(*args, **kwargs)
        if self.exc:
            # BUGFIX: Thread.getName() is deprecated since Python 3.10;
            # use the `name` attribute instead.
            msg = "Thread '%s' threw an exception: %s" % (self.name, self.exc[1])
            new_exc = Exception(msg)
            raise new_exc.with_traceback(self.exc[2])
# Registry of in-flight worker threads, keyed by a monotonically increasing id.
THREADS = {}
THREAD_ID = 0


def call_slow_function(function, args):
    """Start `function(*args)` on a daemon thread; return its registry id."""
    global THREADS, THREAD_ID
    THREAD_ID += 1
    worker = ThreadWithResult(target=function, args=args, daemon=True)
    THREADS[THREAD_ID] = worker
    worker.start()
    return THREAD_ID
def has_call_finished(thread_id):
    """True once the registered thread has stopped; its value is then readable
    via get_call_value()."""
    global THREADS
    return not THREADS[thread_id].is_alive()
def get_call_value(thread_id):
    """Return the result of a finished background call and drop its registry entry.

    Raises ValueError while the thread is still running; join() re-raises any
    exception the thread's target threw.
    """
    global THREADS
    worker = THREADS[thread_id]
    if worker.is_alive():
        # Caller should poll has_call_finished() first.
        raise ValueError("Thread is still running!")
    try:
        # join() re-raises the target's exception, if one was captured.
        worker.join()
    finally:
        # Always free the registry slot, even when join() raised.
        del THREADS[thread_id]
    try:
        return worker.result
    except AttributeError:
        raise RuntimeError(
            'The thread does not have the "result" attribute. An unhandled error occurred inside your Thread'
        )