feat: Add benchmark logging system for performance analysis (closes #104)
Add Python API for capturing performance data to JSON files:

- mcrfpy.start_benchmark() - start capturing frame data
- mcrfpy.end_benchmark() - stop and return filename
- mcrfpy.log_benchmark(msg) - add log message to current frame

The benchmark system captures per-frame data including:

- Frame timing (frame_time_ms, fps, timestamp)
- Detailed timing breakdown (grid_render, entity_render, python, animation, fov)
- Draw call and element counts
- User log messages attached to frames

The output JSON format supports analysis tools and includes:

- Benchmark metadata (PID, timestamps, duration, total frames)
- Full frame-by-frame metrics array

Also refactors ProfilingMetrics from a nested GameEngine struct to a top-level struct for easier forward declaration.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
parent 219a559c35
commit a7fef2aeb6
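Before the diff itself, here is a minimal sketch of driving the new API from a game script. Every call shown (including createScene/setScene/setTimer) appears in the test script at the end of this commit; the scene name, timer delay, and log text are illustrative only.

    import mcrfpy

    def finish(runtime):
        # Stop capturing; returns the auto-generated filename,
        # e.g. 'benchmark_12345_20250528_143022.json'
        filename = mcrfpy.end_benchmark()
        print(f"Benchmark written to: {filename}")

    mcrfpy.start_benchmark()                  # raises RuntimeError if already running
    mcrfpy.log_benchmark("scene setup done")  # attached to the current frame's logs
    mcrfpy.createScene("demo")
    mcrfpy.setScene("demo")
    mcrfpy.setTimer("finish", finish, 1000)   # capture ~1s of frames, then stop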
src/BenchmarkLogger.cpp (new file)
@@ -0,0 +1,37 @@
#include "BenchmarkLogger.h"
#include "GameEngine.h"

// Global benchmark logger instance
BenchmarkLogger g_benchmarkLogger;

void BenchmarkLogger::recordFrame(const ProfilingMetrics& metrics) {
    if (!running) return;

    auto now = std::chrono::high_resolution_clock::now();
    double timestamp_ms = std::chrono::duration<double, std::milli>(now - start_time).count();

    BenchmarkFrame frame;
    frame.frame_number = ++frame_counter;
    frame.timestamp_ms = timestamp_ms;
    frame.frame_time_ms = metrics.frameTime;
    frame.fps = metrics.fps;

    frame.grid_render_ms = metrics.gridRenderTime;
    frame.entity_render_ms = metrics.entityRenderTime;
    frame.python_time_ms = metrics.pythonScriptTime;
    frame.animation_time_ms = metrics.animationTime;
    frame.fov_overlay_ms = metrics.fovOverlayTime;

    frame.draw_calls = metrics.drawCalls;
    frame.ui_elements = metrics.uiElements;
    frame.visible_elements = metrics.visibleElements;
    frame.grid_cells_rendered = metrics.gridCellsRendered;
    frame.entities_rendered = metrics.entitiesRendered;
    frame.total_entities = metrics.totalEntities;

    // Move pending logs to this frame
    frame.logs = std::move(pending_logs);
    pending_logs.clear();

    frames.push_back(std::move(frame));
}
src/BenchmarkLogger.h (new file)
@@ -0,0 +1,243 @@
#pragma once

#include <string>
#include <vector>
#include <chrono>
#include <fstream>
#include <sstream>
#include <iomanip>
#include <stdexcept>

#ifdef _WIN32
#include <process.h>
#define getpid _getpid
#else
#include <unistd.h>
#endif

// Forward declaration
struct ProfilingMetrics;

/**
 * @brief Frame data captured during benchmarking
 */
struct BenchmarkFrame {
    int frame_number;
    double timestamp_ms;  // Time since benchmark start
    float frame_time_ms;
    int fps;

    // Detailed timing breakdown
    float grid_render_ms;
    float entity_render_ms;
    float python_time_ms;
    float animation_time_ms;
    float fov_overlay_ms;

    // Counts
    int draw_calls;
    int ui_elements;
    int visible_elements;
    int grid_cells_rendered;
    int entities_rendered;
    int total_entities;

    // User-provided log messages for this frame
    std::vector<std::string> logs;
};

/**
 * @brief Benchmark logging system for capturing performance data to JSON files
 *
 * Usage from Python:
 *   mcrfpy.start_benchmark()           # Start capturing
 *   mcrfpy.log_benchmark("msg")        # Add comment to current frame
 *   filename = mcrfpy.end_benchmark()  # Stop and get filename
 */
class BenchmarkLogger {
private:
    bool running;
    std::string filename;
    std::chrono::high_resolution_clock::time_point start_time;
    std::vector<BenchmarkFrame> frames;
    std::vector<std::string> pending_logs;  // Logs for current frame (before it's recorded)
    int frame_counter;

    // Generate filename based on PID and timestamp
    std::string generateFilename() {
        auto now = std::chrono::system_clock::now();
        auto time_t = std::chrono::system_clock::to_time_t(now);
        std::tm tm = *std::localtime(&time_t);

        std::ostringstream oss;
        oss << "benchmark_" << getpid() << "_"
            << std::put_time(&tm, "%Y%m%d_%H%M%S") << ".json";
        return oss.str();
    }

    // Get current timestamp as ISO 8601 string
    std::string getCurrentTimestamp() {
        auto now = std::chrono::system_clock::now();
        auto time_t = std::chrono::system_clock::to_time_t(now);
        std::tm tm = *std::localtime(&time_t);

        std::ostringstream oss;
        oss << std::put_time(&tm, "%Y-%m-%dT%H:%M:%S");
        return oss.str();
    }

    // Escape string for JSON
    std::string escapeJson(const std::string& str) {
        std::ostringstream oss;
        for (char c : str) {
            switch (c) {
                case '"': oss << "\\\""; break;
                case '\\': oss << "\\\\"; break;
                case '\b': oss << "\\b"; break;
                case '\f': oss << "\\f"; break;
                case '\n': oss << "\\n"; break;
                case '\r': oss << "\\r"; break;
                case '\t': oss << "\\t"; break;
                default:
                    if ('\x00' <= c && c <= '\x1f') {
                        oss << "\\u" << std::hex << std::setw(4) << std::setfill('0') << (int)c;
                    } else {
                        oss << c;
                    }
            }
        }
        return oss.str();
    }

    std::string start_timestamp;

public:
    BenchmarkLogger() : running(false), frame_counter(0) {}

    /**
     * @brief Start benchmark logging
     * @throws std::runtime_error if already running
     */
    void start() {
        if (running) {
            throw std::runtime_error("Benchmark already running. Call end_benchmark() first.");
        }

        running = true;
        filename = generateFilename();
        start_time = std::chrono::high_resolution_clock::now();
        start_timestamp = getCurrentTimestamp();
        frames.clear();
        pending_logs.clear();
        frame_counter = 0;
    }

    /**
     * @brief Stop benchmark logging and write to file
     * @return The filename that was written
     * @throws std::runtime_error if not running
     */
    std::string end() {
        if (!running) {
            throw std::runtime_error("No benchmark running. Call start_benchmark() first.");
        }

        running = false;

        // Calculate duration
        auto end_time = std::chrono::high_resolution_clock::now();
        double duration_seconds = std::chrono::duration<double>(end_time - start_time).count();
        std::string end_timestamp = getCurrentTimestamp();

        // Write JSON file
        std::ofstream file(filename);
        if (!file.is_open()) {
            throw std::runtime_error("Failed to open benchmark file for writing: " + filename);
        }

        file << "{\n";
        file << "  \"benchmark\": {\n";
        file << "    \"pid\": " << getpid() << ",\n";
        file << "    \"start_time\": \"" << start_timestamp << "\",\n";
        file << "    \"end_time\": \"" << end_timestamp << "\",\n";
        file << "    \"total_frames\": " << frames.size() << ",\n";
        file << "    \"duration_seconds\": " << std::fixed << std::setprecision(3) << duration_seconds << "\n";
        file << "  },\n";

        file << "  \"frames\": [\n";
        for (size_t i = 0; i < frames.size(); ++i) {
            const auto& f = frames[i];
            file << "    {\n";
            file << "      \"frame_number\": " << f.frame_number << ",\n";
            file << "      \"timestamp_ms\": " << std::fixed << std::setprecision(3) << f.timestamp_ms << ",\n";
            file << "      \"frame_time_ms\": " << std::setprecision(3) << f.frame_time_ms << ",\n";
            file << "      \"fps\": " << f.fps << ",\n";
            file << "      \"grid_render_ms\": " << std::setprecision(3) << f.grid_render_ms << ",\n";
            file << "      \"entity_render_ms\": " << std::setprecision(3) << f.entity_render_ms << ",\n";
            file << "      \"python_time_ms\": " << std::setprecision(3) << f.python_time_ms << ",\n";
            file << "      \"animation_time_ms\": " << std::setprecision(3) << f.animation_time_ms << ",\n";
            file << "      \"fov_overlay_ms\": " << std::setprecision(3) << f.fov_overlay_ms << ",\n";
            file << "      \"draw_calls\": " << f.draw_calls << ",\n";
            file << "      \"ui_elements\": " << f.ui_elements << ",\n";
            file << "      \"visible_elements\": " << f.visible_elements << ",\n";
            file << "      \"grid_cells_rendered\": " << f.grid_cells_rendered << ",\n";
            file << "      \"entities_rendered\": " << f.entities_rendered << ",\n";
            file << "      \"total_entities\": " << f.total_entities << ",\n";

            // Write logs array
            file << "      \"logs\": [";
            for (size_t j = 0; j < f.logs.size(); ++j) {
                file << "\"" << escapeJson(f.logs[j]) << "\"";
                if (j < f.logs.size() - 1) file << ", ";
            }
            file << "]\n";

            file << "    }";
            if (i < frames.size() - 1) file << ",";
            file << "\n";
        }
        file << "  ]\n";
        file << "}\n";

        file.close();

        std::string result = filename;
        filename.clear();
        frames.clear();
        pending_logs.clear();
        frame_counter = 0;

        return result;
    }

    /**
     * @brief Add a log message to the current frame
     * @param message The message to log
     * @throws std::runtime_error if not running
     */
    void log(const std::string& message) {
        if (!running) {
            throw std::runtime_error("No benchmark running. Call start_benchmark() first.");
        }
        pending_logs.push_back(message);
    }

    /**
     * @brief Record frame data (called by game loop at end of each frame)
     * @param metrics The current frame's profiling metrics
     */
    void recordFrame(const ProfilingMetrics& metrics);

    /**
     * @brief Check if benchmark is currently running
     */
    bool isRunning() const { return running; }

    /**
     * @brief Get current frame count
     */
    int getFrameCount() const { return frame_counter; }
};

// Global benchmark logger instance
extern BenchmarkLogger g_benchmarkLogger;
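The commit message pitches this JSON at analysis tools; as a sanity check of the schema written by end() above, here is a hedged sketch of a consumer. The key names match the writer; the filename and the statistics chosen (mean and worst frame) are illustrative, not part of this commit.

    import json

    def summarize(path):
        with open(path) as fh:
            data = json.load(fh)
        bench = data["benchmark"]
        frames = data["frames"]
        times = [frame["frame_time_ms"] for frame in frames]
        worst = max(frames, key=lambda frame: frame["frame_time_ms"])
        print(f"PID {bench['pid']}: {bench['total_frames']} frames in {bench['duration_seconds']}s")
        print(f"mean frame time: {sum(times) / len(times):.3f} ms")
        print(f"worst frame #{worst['frame_number']}: {worst['frame_time_ms']:.3f} ms, logs: {worst['logs']}")

    summarize("benchmark_12345_20250528_143022.json")  # hypothetical filename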
src/GameEngine.cpp
@@ -6,6 +6,7 @@
 #include "Resources.h"
 #include "Animation.h"
 #include "Timer.h"
+#include "BenchmarkLogger.h"
 #include "imgui.h"
 #include "imgui-SFML.h"
 #include <cmath>
@@ -309,6 +310,9 @@ void GameEngine::run()
     // Update profiling metrics
     metrics.updateFrameTime(frameTime * 1000.0f); // Convert to milliseconds
 
+    // Record frame data for benchmark logging (if running)
+    g_benchmarkLogger.recordFrame(metrics);
+
     int whole_fps = metrics.fps;
     int tenth_fps = (metrics.fps * 10) % 10;
src/GameEngine.h (124 changed lines)
@@ -14,6 +14,70 @@
 #include <memory>
 #include <sstream>
 
+/**
+ * @brief Performance profiling metrics structure
+ *
+ * Tracks frame timing, render counts, and detailed timing breakdowns.
+ * Used by GameEngine, ProfilerOverlay (F3), and BenchmarkLogger.
+ */
+struct ProfilingMetrics {
+    float frameTime = 0.0f;      // Current frame time in milliseconds
+    float avgFrameTime = 0.0f;   // Average frame time over last N frames
+    int fps = 0;                 // Frames per second
+    int drawCalls = 0;           // Draw calls per frame
+    int uiElements = 0;          // Number of UI elements rendered
+    int visibleElements = 0;     // Number of visible elements
+
+    // Detailed timing breakdowns (added for profiling system)
+    float gridRenderTime = 0.0f;   // Time spent rendering grids (ms)
+    float entityRenderTime = 0.0f; // Time spent rendering entities (ms)
+    float fovOverlayTime = 0.0f;   // Time spent rendering FOV overlays (ms)
+    float pythonScriptTime = 0.0f; // Time spent in Python callbacks (ms)
+    float animationTime = 0.0f;    // Time spent updating animations (ms)
+
+    // Grid-specific metrics
+    int gridCellsRendered = 0;   // Number of grid cells drawn this frame
+    int entitiesRendered = 0;    // Number of entities drawn this frame
+    int totalEntities = 0;       // Total entities in scene
+
+    // Frame time history for averaging
+    static constexpr int HISTORY_SIZE = 60;
+    float frameTimeHistory[HISTORY_SIZE] = {0};
+    int historyIndex = 0;
+
+    void updateFrameTime(float deltaMs) {
+        frameTime = deltaMs;
+        frameTimeHistory[historyIndex] = deltaMs;
+        historyIndex = (historyIndex + 1) % HISTORY_SIZE;
+
+        // Calculate average
+        float sum = 0.0f;
+        for (int i = 0; i < HISTORY_SIZE; ++i) {
+            sum += frameTimeHistory[i];
+        }
+        avgFrameTime = sum / HISTORY_SIZE;
+        fps = avgFrameTime > 0 ? static_cast<int>(1000.0f / avgFrameTime) : 0;
+    }
+
+    void resetPerFrame() {
+        drawCalls = 0;
+        uiElements = 0;
+        visibleElements = 0;
+
+        // Reset per-frame timing metrics
+        gridRenderTime = 0.0f;
+        entityRenderTime = 0.0f;
+        fovOverlayTime = 0.0f;
+        pythonScriptTime = 0.0f;
+        animationTime = 0.0f;
+
+        // Reset per-frame counters
+        gridCellsRendered = 0;
+        entitiesRendered = 0;
+        totalEntities = 0;
+    }
+};
+
 class GameEngine
 {
 public:
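One behavioral detail of updateFrameTime() above is worth noting: the 60-slot history starts zero-filled, so the average is diluted and the derived fps reads far too high until the buffer fills. A small Python model of the same arithmetic (the class and names are mine, not the engine's) makes the ramp visible:

    HISTORY_SIZE = 60  # mirrors ProfilingMetrics::HISTORY_SIZE

    class FrameAverager:
        """Same ring-buffer averaging as ProfilingMetrics::updateFrameTime()."""
        def __init__(self):
            self.history = [0.0] * HISTORY_SIZE
            self.index = 0

        def update(self, delta_ms):
            self.history[self.index] = delta_ms
            self.index = (self.index + 1) % HISTORY_SIZE
            avg = sum(self.history) / HISTORY_SIZE
            return int(1000.0 / avg) if avg > 0 else 0

    fps = FrameAverager()
    print([fps.update(16.7) for _ in range(3)])  # [3592, 1796, 1197]; settles at 59 once all 60 slots are filled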
@@ -76,64 +140,8 @@ public:
     std::map<std::string, std::shared_ptr<Timer>> timers;
     std::string scene;
 
-    // Profiling metrics
-    struct ProfilingMetrics {
-        float frameTime = 0.0f;      // Current frame time in milliseconds
-        float avgFrameTime = 0.0f;   // Average frame time over last N frames
-        int fps = 0;                 // Frames per second
-        int drawCalls = 0;           // Draw calls per frame
-        int uiElements = 0;          // Number of UI elements rendered
-        int visibleElements = 0;     // Number of visible elements
-
-        // Detailed timing breakdowns (added for profiling system)
-        float gridRenderTime = 0.0f;   // Time spent rendering grids (ms)
-        float entityRenderTime = 0.0f; // Time spent rendering entities (ms)
-        float fovOverlayTime = 0.0f;   // Time spent rendering FOV overlays (ms)
-        float pythonScriptTime = 0.0f; // Time spent in Python callbacks (ms)
-        float animationTime = 0.0f;    // Time spent updating animations (ms)
-
-        // Grid-specific metrics
-        int gridCellsRendered = 0;   // Number of grid cells drawn this frame
-        int entitiesRendered = 0;    // Number of entities drawn this frame
-        int totalEntities = 0;       // Total entities in scene
-
-        // Frame time history for averaging
-        static constexpr int HISTORY_SIZE = 60;
-        float frameTimeHistory[HISTORY_SIZE] = {0};
-        int historyIndex = 0;
-
-        void updateFrameTime(float deltaMs) {
-            frameTime = deltaMs;
-            frameTimeHistory[historyIndex] = deltaMs;
-            historyIndex = (historyIndex + 1) % HISTORY_SIZE;
-
-            // Calculate average
-            float sum = 0.0f;
-            for (int i = 0; i < HISTORY_SIZE; ++i) {
-                sum += frameTimeHistory[i];
-            }
-            avgFrameTime = sum / HISTORY_SIZE;
-            fps = avgFrameTime > 0 ? static_cast<int>(1000.0f / avgFrameTime) : 0;
-        }
-
-        void resetPerFrame() {
-            drawCalls = 0;
-            uiElements = 0;
-            visibleElements = 0;
-
-            // Reset per-frame timing metrics
-            gridRenderTime = 0.0f;
-            entityRenderTime = 0.0f;
-            fovOverlayTime = 0.0f;
-            pythonScriptTime = 0.0f;
-            animationTime = 0.0f;
-
-            // Reset per-frame counters
-            gridCellsRendered = 0;
-            entitiesRendered = 0;
-            totalEntities = 0;
-        }
-    } metrics;
+    // Profiling metrics (struct defined above class)
+    ProfilingMetrics metrics;
 
     GameEngine();
     GameEngine(const McRogueFaceConfig& cfg);
src/McRFPy_API.cpp
@@ -10,6 +10,7 @@
 #include "PySceneObject.h"
 #include "GameEngine.h"
 #include "ImGuiConsole.h"
+#include "BenchmarkLogger.h"
 #include "UI.h"
 #include "UILine.h"
 #include "UICircle.h"
@@ -213,6 +214,35 @@ static PyMethodDef mcrfpyMethods[] = {
         MCRF_NOTE("When disabled, the grave/tilde key will not open the console. Use this to ship games without debug features.")
     )},
 
+    {"start_benchmark", McRFPy_API::_startBenchmark, METH_NOARGS,
+     MCRF_FUNCTION(start_benchmark,
+        MCRF_SIG("()", "None"),
+        MCRF_DESC("Start capturing benchmark data to a file."),
+        MCRF_RETURNS("None")
+        MCRF_RAISES("RuntimeError", "If a benchmark is already running")
+        MCRF_NOTE("Benchmark filename is auto-generated from PID and timestamp. Use end_benchmark() to stop and get filename.")
+    )},
+
+    {"end_benchmark", McRFPy_API::_endBenchmark, METH_NOARGS,
+     MCRF_FUNCTION(end_benchmark,
+        MCRF_SIG("()", "str"),
+        MCRF_DESC("Stop benchmark capture and write data to JSON file."),
+        MCRF_RETURNS("str: The filename of the written benchmark data")
+        MCRF_RAISES("RuntimeError", "If no benchmark is currently running")
+        MCRF_NOTE("Returns the auto-generated filename (e.g., 'benchmark_12345_20250528_143022.json')")
+    )},
+
+    {"log_benchmark", McRFPy_API::_logBenchmark, METH_VARARGS,
+     MCRF_FUNCTION(log_benchmark,
+        MCRF_SIG("(message: str)", "None"),
+        MCRF_DESC("Add a log message to the current benchmark frame."),
+        MCRF_ARGS_START
+        MCRF_ARG("message", "Text to associate with the current frame")
+        MCRF_RETURNS("None")
+        MCRF_RAISES("RuntimeError", "If no benchmark is currently running")
+        MCRF_NOTE("Messages appear in the 'logs' array of each frame in the output JSON.")
+    )},
+
     {NULL, NULL, 0, NULL}
 };
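Assuming the MCRF_FUNCTION/MCRF_SIG/MCRF_DESC macros above compose each method's docstring (their names suggest as much, but the macro definitions are outside this diff), the new entries should be introspectable from Python:

    import mcrfpy
    help(mcrfpy.start_benchmark)  # expected: signature, description, and the RuntimeError note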
@@ -1273,6 +1303,42 @@ PyObject* McRFPy_API::_setDevConsole(PyObject* self, PyObject* args) {
     Py_RETURN_NONE;
 }
 
+// Benchmark logging implementation (#104)
+PyObject* McRFPy_API::_startBenchmark(PyObject* self, PyObject* args) {
+    try {
+        g_benchmarkLogger.start();
+        Py_RETURN_NONE;
+    } catch (const std::runtime_error& e) {
+        PyErr_SetString(PyExc_RuntimeError, e.what());
+        return NULL;
+    }
+}
+
+PyObject* McRFPy_API::_endBenchmark(PyObject* self, PyObject* args) {
+    try {
+        std::string filename = g_benchmarkLogger.end();
+        return PyUnicode_FromString(filename.c_str());
+    } catch (const std::runtime_error& e) {
+        PyErr_SetString(PyExc_RuntimeError, e.what());
+        return NULL;
+    }
+}
+
+PyObject* McRFPy_API::_logBenchmark(PyObject* self, PyObject* args) {
+    const char* message;
+    if (!PyArg_ParseTuple(args, "s", &message)) {
+        return NULL;
+    }
+
+    try {
+        g_benchmarkLogger.log(message);
+        Py_RETURN_NONE;
+    } catch (const std::runtime_error& e) {
+        PyErr_SetString(PyExc_RuntimeError, e.what());
+        return NULL;
+    }
+}
+
 // Exception handling implementation
 void McRFPy_API::signalPythonException() {
     // Check if we should exit on exception (consult config via game)
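The try/catch blocks above translate the logger's std::runtime_error into a Python RuntimeError, so the C++ error messages surface verbatim in scripts:

    import mcrfpy

    try:
        mcrfpy.end_benchmark()  # nothing was started
    except RuntimeError as e:
        print(e)  # "No benchmark running. Call start_benchmark() first."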
src/McRFPy_API.h
@@ -82,6 +82,11 @@ public:
     // Profiling/metrics
     static PyObject* _getMetrics(PyObject*, PyObject*);
 
+    // Benchmark logging (#104)
+    static PyObject* _startBenchmark(PyObject*, PyObject*);
+    static PyObject* _endBenchmark(PyObject*, PyObject*);
+    static PyObject* _logBenchmark(PyObject*, PyObject*);
+
     // Developer console
     static PyObject* _setDevConsole(PyObject*, PyObject*);
Python test script (new file; path not shown in this view)
@@ -0,0 +1,129 @@
#!/usr/bin/env python3
"""Test benchmark logging functionality (Issue #104)"""
import mcrfpy
import sys
import os
import json

def run_test(runtime):
    """Timer callback to test benchmark logging"""
    # Stop the benchmark and get filename
    try:
        filename = mcrfpy.end_benchmark()
        print(f"Benchmark written to: {filename}")

        # Check file exists
        if not os.path.exists(filename):
            print(f"FAIL: Benchmark file not found: {filename}")
            sys.exit(1)

        # Parse and validate JSON
        with open(filename, 'r') as f:
            data = json.load(f)

        # Validate structure
        if 'benchmark' not in data:
            print("FAIL: Missing 'benchmark' key")
            sys.exit(1)

        if 'frames' not in data:
            print("FAIL: Missing 'frames' key")
            sys.exit(1)

        # Check benchmark metadata
        bench = data['benchmark']
        if 'pid' not in bench:
            print("FAIL: Missing 'pid' in benchmark")
            sys.exit(1)
        if 'start_time' not in bench:
            print("FAIL: Missing 'start_time' in benchmark")
            sys.exit(1)
        if 'end_time' not in bench:
            print("FAIL: Missing 'end_time' in benchmark")
            sys.exit(1)
        if 'total_frames' not in bench:
            print("FAIL: Missing 'total_frames' in benchmark")
            sys.exit(1)

        print(f"  PID: {bench['pid']}")
        print(f"  Duration: {bench['duration_seconds']:.3f}s")
        print(f"  Frames: {bench['total_frames']}")

        # Check we have frames
        if len(data['frames']) == 0:
            print("FAIL: No frames recorded")
            sys.exit(1)

        # Check frame structure
        frame = data['frames'][0]
        required_fields = ['frame_number', 'timestamp_ms', 'frame_time_ms', 'fps',
                           'grid_render_ms', 'entity_render_ms', 'python_time_ms',
                           'draw_calls', 'ui_elements', 'logs']
        for field in required_fields:
            if field not in frame:
                print(f"FAIL: Missing field '{field}' in frame")
                sys.exit(1)

        # Check log message was captured
        found_log = False
        for frame in data['frames']:
            if 'Test log message' in frame.get('logs', []):
                found_log = True
                break

        if not found_log:
            print("FAIL: Log message not found in any frame")
            sys.exit(1)

        print(f"  First frame FPS: {data['frames'][0]['fps']}")
        print(f"  Log messages captured: Yes")

        # Clean up
        os.remove(filename)
        print(f"  Cleaned up: {filename}")

        print("PASS")
        sys.exit(0)

    except Exception as e:
        print(f"FAIL: {e}")
        sys.exit(1)

# Test error handling - calling end without start
try:
    mcrfpy.end_benchmark()
    print("FAIL: end_benchmark() should have raised RuntimeError")
    sys.exit(1)
except RuntimeError as e:
    print(f"Correct error on end without start: {e}")

# Test error handling - logging without start
try:
    mcrfpy.log_benchmark("test")
    print("FAIL: log_benchmark() should have raised RuntimeError")
    sys.exit(1)
except RuntimeError as e:
    print(f"Correct error on log without start: {e}")

# Start the benchmark
mcrfpy.start_benchmark()
print("Benchmark started")

# Test error handling - double start
try:
    mcrfpy.start_benchmark()
    print("FAIL: double start_benchmark() should have raised RuntimeError")
    sys.exit(1)
except RuntimeError as e:
    print(f"Correct error on double start: {e}")

# Log a test message
mcrfpy.log_benchmark("Test log message")
print("Logged test message")

# Set up scene and run for a few frames
mcrfpy.createScene("test")
mcrfpy.setScene("test")

# Schedule test completion after ~100ms (to capture some frames)
mcrfpy.setTimer("test", run_test, 100)