Compare commits
3 Commits: 6c496b8732 ... 8583db7225

| Author | SHA1 | Date |
|---|---|---|
|  | 8583db7225 |  |
|  | a7fef2aeb6 |  |
|  | 219a559c35 |  |
@@ -0,0 +1,38 @@ (new file: BenchmarkLogger implementation)
+#include "BenchmarkLogger.h"
+#include "GameEngine.h"
+
+// Global benchmark logger instance
+BenchmarkLogger g_benchmarkLogger;
+
+void BenchmarkLogger::recordFrame(const ProfilingMetrics& metrics) {
+    if (!running) return;
+
+    auto now = std::chrono::high_resolution_clock::now();
+    double timestamp_ms = std::chrono::duration<double, std::milli>(now - start_time).count();
+
+    BenchmarkFrame frame;
+    frame.frame_number = ++frame_counter;
+    frame.timestamp_ms = timestamp_ms;
+    frame.frame_time_ms = metrics.frameTime;
+    frame.fps = metrics.fps;
+
+    frame.work_time_ms = metrics.workTime;
+    frame.grid_render_ms = metrics.gridRenderTime;
+    frame.entity_render_ms = metrics.entityRenderTime;
+    frame.python_time_ms = metrics.pythonScriptTime;
+    frame.animation_time_ms = metrics.animationTime;
+    frame.fov_overlay_ms = metrics.fovOverlayTime;
+
+    frame.draw_calls = metrics.drawCalls;
+    frame.ui_elements = metrics.uiElements;
+    frame.visible_elements = metrics.visibleElements;
+    frame.grid_cells_rendered = metrics.gridCellsRendered;
+    frame.entities_rendered = metrics.entitiesRendered;
+    frame.total_entities = metrics.totalEntities;
+
+    // Move pending logs to this frame
+    frame.logs = std::move(pending_logs);
+    pending_logs.clear();
+
+    frames.push_back(std::move(frame));
+}
@@ -0,0 +1,245 @@ (new file: BenchmarkLogger header)
+#pragma once
+
+#include <string>
+#include <vector>
+#include <chrono>
+#include <fstream>
+#include <sstream>
+#include <iomanip>
+#include <stdexcept>
+
+#ifdef _WIN32
+#include <process.h>
+#define getpid _getpid
+#else
+#include <unistd.h>
+#endif
+
+// Forward declaration
+struct ProfilingMetrics;
+
+/**
+ * @brief Frame data captured during benchmarking
+ */
+struct BenchmarkFrame {
+    int frame_number;
+    double timestamp_ms;       // Time since benchmark start
+    float frame_time_ms;
+    int fps;
+
+    // Detailed timing breakdown
+    float work_time_ms;        // Actual work time (frame_time - sleep_time)
+    float grid_render_ms;
+    float entity_render_ms;
+    float python_time_ms;
+    float animation_time_ms;
+    float fov_overlay_ms;
+
+    // Counts
+    int draw_calls;
+    int ui_elements;
+    int visible_elements;
+    int grid_cells_rendered;
+    int entities_rendered;
+    int total_entities;
+
+    // User-provided log messages for this frame
+    std::vector<std::string> logs;
+};
+
+/**
+ * @brief Benchmark logging system for capturing performance data to JSON files
+ *
+ * Usage from Python:
+ *   mcrfpy.start_benchmark()            # Start capturing
+ *   mcrfpy.log_benchmark("msg")         # Add comment to current frame
+ *   filename = mcrfpy.end_benchmark()   # Stop and get filename
+ */
+class BenchmarkLogger {
+private:
+    bool running;
+    std::string filename;
+    std::chrono::high_resolution_clock::time_point start_time;
+    std::vector<BenchmarkFrame> frames;
+    std::vector<std::string> pending_logs;  // Logs for current frame (before it's recorded)
+    int frame_counter;
+
+    // Generate filename based on PID and timestamp
+    std::string generateFilename() {
+        auto now = std::chrono::system_clock::now();
+        auto time_t = std::chrono::system_clock::to_time_t(now);
+        std::tm tm = *std::localtime(&time_t);
+
+        std::ostringstream oss;
+        oss << "benchmark_" << getpid() << "_"
+            << std::put_time(&tm, "%Y%m%d_%H%M%S") << ".json";
+        return oss.str();
+    }
+
+    // Get current timestamp as ISO 8601 string
+    std::string getCurrentTimestamp() {
+        auto now = std::chrono::system_clock::now();
+        auto time_t = std::chrono::system_clock::to_time_t(now);
+        std::tm tm = *std::localtime(&time_t);
+
+        std::ostringstream oss;
+        oss << std::put_time(&tm, "%Y-%m-%dT%H:%M:%S");
+        return oss.str();
+    }
+
+    // Escape string for JSON
+    std::string escapeJson(const std::string& str) {
+        std::ostringstream oss;
+        for (char c : str) {
+            switch (c) {
+                case '"': oss << "\\\""; break;
+                case '\\': oss << "\\\\"; break;
+                case '\b': oss << "\\b"; break;
+                case '\f': oss << "\\f"; break;
+                case '\n': oss << "\\n"; break;
+                case '\r': oss << "\\r"; break;
+                case '\t': oss << "\\t"; break;
+                default:
+                    if ('\x00' <= c && c <= '\x1f') {
+                        oss << "\\u" << std::hex << std::setw(4) << std::setfill('0') << (int)c;
+                    } else {
+                        oss << c;
+                    }
+            }
+        }
+        return oss.str();
+    }
+
+    std::string start_timestamp;
+
+public:
+    BenchmarkLogger() : running(false), frame_counter(0) {}
+
+    /**
+     * @brief Start benchmark logging
+     * @throws std::runtime_error if already running
+     */
+    void start() {
+        if (running) {
+            throw std::runtime_error("Benchmark already running. Call end_benchmark() first.");
+        }
+
+        running = true;
+        filename = generateFilename();
+        start_time = std::chrono::high_resolution_clock::now();
+        start_timestamp = getCurrentTimestamp();
+        frames.clear();
+        pending_logs.clear();
+        frame_counter = 0;
+    }
+
+    /**
+     * @brief Stop benchmark logging and write to file
+     * @return The filename that was written
+     * @throws std::runtime_error if not running
+     */
+    std::string end() {
+        if (!running) {
+            throw std::runtime_error("No benchmark running. Call start_benchmark() first.");
+        }
+
+        running = false;
+
+        // Calculate duration
+        auto end_time = std::chrono::high_resolution_clock::now();
+        double duration_seconds = std::chrono::duration<double>(end_time - start_time).count();
+        std::string end_timestamp = getCurrentTimestamp();
+
+        // Write JSON file
+        std::ofstream file(filename);
+        if (!file.is_open()) {
+            throw std::runtime_error("Failed to open benchmark file for writing: " + filename);
+        }
+
+        file << "{\n";
+        file << "  \"benchmark\": {\n";
+        file << "    \"pid\": " << getpid() << ",\n";
+        file << "    \"start_time\": \"" << start_timestamp << "\",\n";
+        file << "    \"end_time\": \"" << end_timestamp << "\",\n";
+        file << "    \"total_frames\": " << frames.size() << ",\n";
+        file << "    \"duration_seconds\": " << std::fixed << std::setprecision(3) << duration_seconds << "\n";
+        file << "  },\n";
+
+        file << "  \"frames\": [\n";
+        for (size_t i = 0; i < frames.size(); ++i) {
+            const auto& f = frames[i];
+            file << "    {\n";
+            file << "      \"frame_number\": " << f.frame_number << ",\n";
+            file << "      \"timestamp_ms\": " << std::fixed << std::setprecision(3) << f.timestamp_ms << ",\n";
+            file << "      \"frame_time_ms\": " << std::setprecision(3) << f.frame_time_ms << ",\n";
+            file << "      \"fps\": " << f.fps << ",\n";
+            file << "      \"work_time_ms\": " << std::setprecision(3) << f.work_time_ms << ",\n";
+            file << "      \"grid_render_ms\": " << std::setprecision(3) << f.grid_render_ms << ",\n";
+            file << "      \"entity_render_ms\": " << std::setprecision(3) << f.entity_render_ms << ",\n";
+            file << "      \"python_time_ms\": " << std::setprecision(3) << f.python_time_ms << ",\n";
+            file << "      \"animation_time_ms\": " << std::setprecision(3) << f.animation_time_ms << ",\n";
+            file << "      \"fov_overlay_ms\": " << std::setprecision(3) << f.fov_overlay_ms << ",\n";
+            file << "      \"draw_calls\": " << f.draw_calls << ",\n";
+            file << "      \"ui_elements\": " << f.ui_elements << ",\n";
+            file << "      \"visible_elements\": " << f.visible_elements << ",\n";
+            file << "      \"grid_cells_rendered\": " << f.grid_cells_rendered << ",\n";
+            file << "      \"entities_rendered\": " << f.entities_rendered << ",\n";
+            file << "      \"total_entities\": " << f.total_entities << ",\n";
+
+            // Write logs array
+            file << "      \"logs\": [";
+            for (size_t j = 0; j < f.logs.size(); ++j) {
+                file << "\"" << escapeJson(f.logs[j]) << "\"";
+                if (j < f.logs.size() - 1) file << ", ";
+            }
+            file << "]\n";
+
+            file << "    }";
+            if (i < frames.size() - 1) file << ",";
+            file << "\n";
+        }
+        file << "  ]\n";
+        file << "}\n";
+
+        file.close();
+
+        std::string result = filename;
+        filename.clear();
+        frames.clear();
+        pending_logs.clear();
+        frame_counter = 0;
+
+        return result;
+    }
+
+    /**
+     * @brief Add a log message to the current frame
+     * @param message The message to log
+     * @throws std::runtime_error if not running
+     */
+    void log(const std::string& message) {
+        if (!running) {
+            throw std::runtime_error("No benchmark running. Call start_benchmark() first.");
+        }
+        pending_logs.push_back(message);
+    }
+
+    /**
+     * @brief Record frame data (called by game loop at end of each frame)
+     * @param metrics The current frame's profiling metrics
+     */
+    void recordFrame(const ProfilingMetrics& metrics);
+
+    /**
+     * @brief Check if benchmark is currently running
+     */
+    bool isRunning() const { return running; }
+
+    /**
+     * @brief Get current frame count
+     */
+    int getFrameCount() const { return frame_counter; }
+};
+
+// Global benchmark logger instance
+extern BenchmarkLogger g_benchmarkLogger;
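The end() writer above produces a two-part JSON document: a "benchmark" header object (pid, start_time, end_time, total_frames, duration_seconds) and a "frames" array whose keys mirror BenchmarkFrame. A minimal post-processing sketch in Python; the key names come straight from the writer, while the summarize() helper and the example path are only illustrative (the real path is whatever end_benchmark() returns):

    import json

    def summarize(path):
        """Print a one-line summary of a benchmark capture."""
        with open(path) as f:
            data = json.load(f)
        info = data["benchmark"]
        frames = data["frames"]
        avg_ms = sum(fr["frame_time_ms"] for fr in frames) / max(len(frames), 1)
        print(f"pid {info['pid']}: {info['total_frames']} frames over "
              f"{info['duration_seconds']}s, avg {avg_ms:.2f} ms/frame")

    summarize("benchmark_12345_20250528_143022.json")  # filename format documented below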
@@ -6,6 +6,7 @@
 #include "Resources.h"
 #include "Animation.h"
 #include "Timer.h"
+#include "BenchmarkLogger.h"
 #include "imgui.h"
 #include "imgui-SFML.h"
 #include <cmath>
@@ -290,6 +291,9 @@ void GameEngine::run()
             ImGui::SFML::Render(*window);
         }

+        // Record work time before display (which may block for vsync/framerate limit)
+        metrics.workTime = clock.getElapsedTime().asSeconds() * 1000.0f;
+
         // Display the frame
         if (headless) {
             headless_renderer->display();
@@ -309,6 +313,9 @@ void GameEngine::run()
         // Update profiling metrics
         metrics.updateFrameTime(frameTime * 1000.0f); // Convert to milliseconds

+        // Record frame data for benchmark logging (if running)
+        g_benchmarkLogger.recordFrame(metrics);
+
         int whole_fps = metrics.fps;
         int tenth_fps = (metrics.fps * 10) % 10;

src/GameEngine.h (125)
@@ -14,6 +14,71 @@
 #include <memory>
 #include <sstream>

+/**
+ * @brief Performance profiling metrics structure
+ *
+ * Tracks frame timing, render counts, and detailed timing breakdowns.
+ * Used by GameEngine, ProfilerOverlay (F3), and BenchmarkLogger.
+ */
+struct ProfilingMetrics {
+    float frameTime = 0.0f;        // Current frame time in milliseconds
+    float avgFrameTime = 0.0f;     // Average frame time over last N frames
+    int fps = 0;                   // Frames per second
+    int drawCalls = 0;             // Draw calls per frame
+    int uiElements = 0;            // Number of UI elements rendered
+    int visibleElements = 0;       // Number of visible elements
+
+    // Detailed timing breakdowns (added for profiling system)
+    float gridRenderTime = 0.0f;   // Time spent rendering grids (ms)
+    float entityRenderTime = 0.0f; // Time spent rendering entities (ms)
+    float fovOverlayTime = 0.0f;   // Time spent rendering FOV overlays (ms)
+    float pythonScriptTime = 0.0f; // Time spent in Python callbacks (ms)
+    float animationTime = 0.0f;    // Time spent updating animations (ms)
+    float workTime = 0.0f;         // Total work time before display/sleep (ms)
+
+    // Grid-specific metrics
+    int gridCellsRendered = 0;     // Number of grid cells drawn this frame
+    int entitiesRendered = 0;      // Number of entities drawn this frame
+    int totalEntities = 0;         // Total entities in scene
+
+    // Frame time history for averaging
+    static constexpr int HISTORY_SIZE = 60;
+    float frameTimeHistory[HISTORY_SIZE] = {0};
+    int historyIndex = 0;
+
+    void updateFrameTime(float deltaMs) {
+        frameTime = deltaMs;
+        frameTimeHistory[historyIndex] = deltaMs;
+        historyIndex = (historyIndex + 1) % HISTORY_SIZE;
+
+        // Calculate average
+        float sum = 0.0f;
+        for (int i = 0; i < HISTORY_SIZE; ++i) {
+            sum += frameTimeHistory[i];
+        }
+        avgFrameTime = sum / HISTORY_SIZE;
+        fps = avgFrameTime > 0 ? static_cast<int>(1000.0f / avgFrameTime) : 0;
+    }
+
+    void resetPerFrame() {
+        drawCalls = 0;
+        uiElements = 0;
+        visibleElements = 0;
+
+        // Reset per-frame timing metrics
+        gridRenderTime = 0.0f;
+        entityRenderTime = 0.0f;
+        fovOverlayTime = 0.0f;
+        pythonScriptTime = 0.0f;
+        animationTime = 0.0f;
+
+        // Reset per-frame counters
+        gridCellsRendered = 0;
+        entitiesRendered = 0;
+        totalEntities = 0;
+    }
+};
+
 class GameEngine
 {
 public:
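updateFrameTime() keeps a 60-slot ring buffer and derives fps from the average over the whole buffer, so the reported FPS is a rolling average rather than an instantaneous value. A quick check of that arithmetic in Python (hypothetical steady frame times; the constants mirror HISTORY_SIZE and the fps formula above):

    HISTORY_SIZE = 60
    history = [16.67] * HISTORY_SIZE              # steady ~60 Hz frame times, in ms

    avg_frame_time = sum(history) / HISTORY_SIZE  # 16.67 ms
    fps = int(1000.0 / avg_frame_time) if avg_frame_time > 0 else 0
    print(avg_frame_time, fps)                    # 16.67 -> 59 (integer truncation)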
@@ -76,64 +141,8 @@ public:
     std::map<std::string, std::shared_ptr<Timer>> timers;
     std::string scene;

-    // Profiling metrics
-    struct ProfilingMetrics {
-        float frameTime = 0.0f;        // Current frame time in milliseconds
-        float avgFrameTime = 0.0f;     // Average frame time over last N frames
-        int fps = 0;                   // Frames per second
-        int drawCalls = 0;             // Draw calls per frame
-        int uiElements = 0;            // Number of UI elements rendered
-        int visibleElements = 0;       // Number of visible elements
-
-        // Detailed timing breakdowns (added for profiling system)
-        float gridRenderTime = 0.0f;   // Time spent rendering grids (ms)
-        float entityRenderTime = 0.0f; // Time spent rendering entities (ms)
-        float fovOverlayTime = 0.0f;   // Time spent rendering FOV overlays (ms)
-        float pythonScriptTime = 0.0f; // Time spent in Python callbacks (ms)
-        float animationTime = 0.0f;    // Time spent updating animations (ms)
-
-        // Grid-specific metrics
-        int gridCellsRendered = 0;     // Number of grid cells drawn this frame
-        int entitiesRendered = 0;      // Number of entities drawn this frame
-        int totalEntities = 0;         // Total entities in scene
-
-        // Frame time history for averaging
-        static constexpr int HISTORY_SIZE = 60;
-        float frameTimeHistory[HISTORY_SIZE] = {0};
-        int historyIndex = 0;
-
-        void updateFrameTime(float deltaMs) {
-            frameTime = deltaMs;
-            frameTimeHistory[historyIndex] = deltaMs;
-            historyIndex = (historyIndex + 1) % HISTORY_SIZE;
-
-            // Calculate average
-            float sum = 0.0f;
-            for (int i = 0; i < HISTORY_SIZE; ++i) {
-                sum += frameTimeHistory[i];
-            }
-            avgFrameTime = sum / HISTORY_SIZE;
-            fps = avgFrameTime > 0 ? static_cast<int>(1000.0f / avgFrameTime) : 0;
-        }
-
-        void resetPerFrame() {
-            drawCalls = 0;
-            uiElements = 0;
-            visibleElements = 0;
-
-            // Reset per-frame timing metrics
-            gridRenderTime = 0.0f;
-            entityRenderTime = 0.0f;
-            fovOverlayTime = 0.0f;
-            pythonScriptTime = 0.0f;
-            animationTime = 0.0f;
-
-            // Reset per-frame counters
-            gridCellsRendered = 0;
-            entitiesRendered = 0;
-            totalEntities = 0;
-        }
-    } metrics;
+    // Profiling metrics (struct defined above class)
+    ProfilingMetrics metrics;

     GameEngine();
     GameEngine(const McRogueFaceConfig& cfg);
@@ -10,6 +10,7 @@
 #include "PySceneObject.h"
 #include "GameEngine.h"
 #include "ImGuiConsole.h"
+#include "BenchmarkLogger.h"
 #include "UI.h"
 #include "UILine.h"
 #include "UICircle.h"
@@ -213,6 +214,35 @@ static PyMethodDef mcrfpyMethods[] = {
         MCRF_NOTE("When disabled, the grave/tilde key will not open the console. Use this to ship games without debug features.")
     )},

+    {"start_benchmark", McRFPy_API::_startBenchmark, METH_NOARGS,
+        MCRF_FUNCTION(start_benchmark,
+            MCRF_SIG("()", "None"),
+            MCRF_DESC("Start capturing benchmark data to a file."),
+            MCRF_RETURNS("None")
+            MCRF_RAISES("RuntimeError", "If a benchmark is already running")
+            MCRF_NOTE("Benchmark filename is auto-generated from PID and timestamp. Use end_benchmark() to stop and get filename.")
+        )},
+
+    {"end_benchmark", McRFPy_API::_endBenchmark, METH_NOARGS,
+        MCRF_FUNCTION(end_benchmark,
+            MCRF_SIG("()", "str"),
+            MCRF_DESC("Stop benchmark capture and write data to JSON file."),
+            MCRF_RETURNS("str: The filename of the written benchmark data")
+            MCRF_RAISES("RuntimeError", "If no benchmark is currently running")
+            MCRF_NOTE("Returns the auto-generated filename (e.g., 'benchmark_12345_20250528_143022.json')")
+        )},
+
+    {"log_benchmark", McRFPy_API::_logBenchmark, METH_VARARGS,
+        MCRF_FUNCTION(log_benchmark,
+            MCRF_SIG("(message: str)", "None"),
+            MCRF_DESC("Add a log message to the current benchmark frame."),
+            MCRF_ARGS_START
+            MCRF_ARG("message", "Text to associate with the current frame")
+            MCRF_RETURNS("None")
+            MCRF_RAISES("RuntimeError", "If no benchmark is currently running")
+            MCRF_NOTE("Messages appear in the 'logs' array of each frame in the output JSON.")
+        )},
+
     {NULL, NULL, 0, NULL}
 };
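With these three methods registered, a capture from game code is just the calls documented above. A minimal sketch, assuming a running game loop and the setTimer()/delTimer() API used by the benchmark suite at the end of this diff; the timer name, the 5000 interval, and its millisecond unit are assumptions for illustration:

    import mcrfpy

    mcrfpy.start_benchmark()              # begin capture; raises RuntimeError if one is already running
    mcrfpy.log_benchmark("spawned boss")  # note attached to the current frame's "logs" array

    def stop_capture(runtime):
        path = mcrfpy.end_benchmark()     # stop capture, returns the JSON filename
        print("benchmark written to", path)
        mcrfpy.delTimer("stop_capture")

    mcrfpy.setTimer("stop_capture", stop_capture, 5000)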
@@ -1244,6 +1274,18 @@ PyObject* McRFPy_API::_getMetrics(PyObject* self, PyObject* args) {
     PyDict_SetItemString(dict, "ui_elements", PyLong_FromLong(game->metrics.uiElements));
     PyDict_SetItemString(dict, "visible_elements", PyLong_FromLong(game->metrics.visibleElements));

+    // #144 - Add detailed timing breakdown (in milliseconds)
+    PyDict_SetItemString(dict, "grid_render_time", PyFloat_FromDouble(game->metrics.gridRenderTime));
+    PyDict_SetItemString(dict, "entity_render_time", PyFloat_FromDouble(game->metrics.entityRenderTime));
+    PyDict_SetItemString(dict, "fov_overlay_time", PyFloat_FromDouble(game->metrics.fovOverlayTime));
+    PyDict_SetItemString(dict, "python_time", PyFloat_FromDouble(game->metrics.pythonScriptTime));
+    PyDict_SetItemString(dict, "animation_time", PyFloat_FromDouble(game->metrics.animationTime));
+
+    // #144 - Add grid-specific metrics
+    PyDict_SetItemString(dict, "grid_cells_rendered", PyLong_FromLong(game->metrics.gridCellsRendered));
+    PyDict_SetItemString(dict, "entities_rendered", PyLong_FromLong(game->metrics.entitiesRendered));
+    PyDict_SetItemString(dict, "total_entities", PyLong_FromLong(game->metrics.totalEntities));
+
     // Add general metrics
     PyDict_SetItemString(dict, "current_frame", PyLong_FromLong(game->getFrame()));
     PyDict_SetItemString(dict, "runtime", PyFloat_FromDouble(game->runtime.getElapsedTime().asSeconds()));
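On the Python side the new entries appear as extra keys in the dict returned by mcrfpy.getMetrics(); the names match the ones sampled by the benchmark suite below. A small sketch that flags frames over the 60 FPS budget (the timer name and reporting format are illustrative):

    import mcrfpy

    def check_budget(runtime):
        m = mcrfpy.getMetrics()
        # #144 additions: grid_render_time, entity_render_time, fov_overlay_time,
        # python_time, animation_time, grid_cells_rendered, entities_rendered, total_entities
        if m["frame_time"] > 16.67:
            print(f"slow frame: {m['frame_time']:.2f} ms "
                  f"(grid {m['grid_render_time']:.2f} ms, python {m['python_time']:.2f} ms)")

    mcrfpy.setTimer("budget_check", check_budget, 1)  # sample every frame, as the suite below does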
@@ -1261,6 +1303,42 @@ PyObject* McRFPy_API::_setDevConsole(PyObject* self, PyObject* args) {
     Py_RETURN_NONE;
 }

+// Benchmark logging implementation (#104)
+PyObject* McRFPy_API::_startBenchmark(PyObject* self, PyObject* args) {
+    try {
+        g_benchmarkLogger.start();
+        Py_RETURN_NONE;
+    } catch (const std::runtime_error& e) {
+        PyErr_SetString(PyExc_RuntimeError, e.what());
+        return NULL;
+    }
+}
+
+PyObject* McRFPy_API::_endBenchmark(PyObject* self, PyObject* args) {
+    try {
+        std::string filename = g_benchmarkLogger.end();
+        return PyUnicode_FromString(filename.c_str());
+    } catch (const std::runtime_error& e) {
+        PyErr_SetString(PyExc_RuntimeError, e.what());
+        return NULL;
+    }
+}
+
+PyObject* McRFPy_API::_logBenchmark(PyObject* self, PyObject* args) {
+    const char* message;
+    if (!PyArg_ParseTuple(args, "s", &message)) {
+        return NULL;
+    }
+
+    try {
+        g_benchmarkLogger.log(message);
+        Py_RETURN_NONE;
+    } catch (const std::runtime_error& e) {
+        PyErr_SetString(PyExc_RuntimeError, e.what());
+        return NULL;
+    }
+}
+
 // Exception handling implementation
 void McRFPy_API::signalPythonException() {
     // Check if we should exit on exception (consult config via game)
@@ -82,6 +82,11 @@ public:
     // Profiling/metrics
     static PyObject* _getMetrics(PyObject*, PyObject*);

+    // Benchmark logging (#104)
+    static PyObject* _startBenchmark(PyObject*, PyObject*);
+    static PyObject* _endBenchmark(PyObject*, PyObject*);
+    static PyObject* _logBenchmark(PyObject*, PyObject*);
+
     // Developer console
     static PyObject* _setDevConsole(PyObject*, PyObject*);
@@ -206,30 +206,36 @@ void UIArc::resize(float w, float h) {
 bool UIArc::setProperty(const std::string& name, float value) {
     if (name == "radius") {
         setRadius(value);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "start_angle") {
         setStartAngle(value);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "end_angle") {
         setEndAngle(value);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "thickness") {
         setThickness(value);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "x") {
         center.x = value;
         position = center;
         vertices_dirty = true;
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     else if (name == "y") {
         center.y = value;
         position = center;
         vertices_dirty = true;
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     return false;
@@ -238,6 +244,7 @@ bool UIArc::setProperty(const std::string& name, float value) {
 bool UIArc::setProperty(const std::string& name, const sf::Color& value) {
     if (name == "color") {
         setColor(value);
+        markDirty(); // #144 - Content change
         return true;
     }
     return false;
@@ -246,6 +253,7 @@ bool UIArc::setProperty(const std::string& name, const sf::Color& value) {
 bool UIArc::setProperty(const std::string& name, const sf::Vector2f& value) {
     if (name == "center") {
         setCenter(value);
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     return false;
@@ -471,71 +471,84 @@ bool UICaption::setProperty(const std::string& name, float value) {
     if (name == "x") {
         position.x = value;
         text.setPosition(position); // Keep text in sync
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     else if (name == "y") {
         position.y = value;
         text.setPosition(position); // Keep text in sync
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     else if (name == "font_size" || name == "size") { // Support both for backward compatibility
         text.setCharacterSize(static_cast<unsigned int>(value));
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "outline") {
         text.setOutlineThickness(value);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "fill_color.r") {
         auto color = text.getFillColor();
         color.r = static_cast<sf::Uint8>(std::clamp(value, 0.0f, 255.0f));
         text.setFillColor(color);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "fill_color.g") {
         auto color = text.getFillColor();
         color.g = static_cast<sf::Uint8>(std::clamp(value, 0.0f, 255.0f));
         text.setFillColor(color);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "fill_color.b") {
         auto color = text.getFillColor();
         color.b = static_cast<sf::Uint8>(std::clamp(value, 0.0f, 255.0f));
         text.setFillColor(color);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "fill_color.a") {
         auto color = text.getFillColor();
         color.a = static_cast<sf::Uint8>(std::clamp(value, 0.0f, 255.0f));
         text.setFillColor(color);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "outline_color.r") {
         auto color = text.getOutlineColor();
         color.r = static_cast<sf::Uint8>(std::clamp(value, 0.0f, 255.0f));
         text.setOutlineColor(color);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "outline_color.g") {
         auto color = text.getOutlineColor();
         color.g = static_cast<sf::Uint8>(std::clamp(value, 0.0f, 255.0f));
         text.setOutlineColor(color);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "outline_color.b") {
         auto color = text.getOutlineColor();
         color.b = static_cast<sf::Uint8>(std::clamp(value, 0.0f, 255.0f));
         text.setOutlineColor(color);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "outline_color.a") {
         auto color = text.getOutlineColor();
         color.a = static_cast<sf::Uint8>(std::clamp(value, 0.0f, 255.0f));
         text.setOutlineColor(color);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "z_index") {
         z_index = static_cast<int>(value);
+        markDirty(); // #144 - Z-order change affects parent
         return true;
     }
     return false;
@@ -544,10 +557,12 @@ bool UICaption::setProperty(const std::string& name, float value) {
 bool UICaption::setProperty(const std::string& name, const sf::Color& value) {
     if (name == "fill_color") {
         text.setFillColor(value);
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "outline_color") {
         text.setOutlineColor(value);
+        markDirty(); // #144 - Content change
         return true;
     }
     return false;
@@ -556,6 +571,7 @@ bool UICaption::setProperty(const std::string& name, const sf::Color& value) {
 bool UICaption::setProperty(const std::string& name, const std::string& value) {
     if (name == "text") {
         text.setString(value);
+        markDirty(); // #144 - Content change
         return true;
     }
     return false;
@@ -172,15 +172,19 @@ void UICircle::resize(float w, float h) {
 bool UICircle::setProperty(const std::string& name, float value) {
     if (name == "radius") {
         setRadius(value);
+        markDirty(); // #144 - Content change
         return true;
     } else if (name == "outline") {
         setOutline(value);
+        markDirty(); // #144 - Content change
         return true;
     } else if (name == "x") {
         position.x = value;
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     } else if (name == "y") {
         position.y = value;
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     return false;
@@ -189,9 +193,11 @@ bool UICircle::setProperty(const std::string& name, float value) {
 bool UICircle::setProperty(const std::string& name, const sf::Color& value) {
     if (name == "fill_color") {
         setFillColor(value);
+        markDirty(); // #144 - Content change
         return true;
     } else if (name == "outline_color") {
         setOutlineColor(value);
+        markDirty(); // #144 - Content change
         return true;
     }
     return false;
@@ -200,6 +206,7 @@ bool UICircle::setProperty(const std::string& name, const sf::Color& value) {
 bool UICircle::setProperty(const std::string& name, const sf::Vector2f& value) {
     if (name == "center" || name == "position") {
         position = value;
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     return false;
@@ -674,15 +674,18 @@ bool UIEntity::setProperty(const std::string& name, float value) {
     if (name == "x") {
         position.x = value;
         // Don't update sprite position here - UIGrid::render() handles the pixel positioning
+        if (grid) grid->markDirty(); // #144 - Propagate to parent grid for texture caching
         return true;
     }
     else if (name == "y") {
         position.y = value;
         // Don't update sprite position here - UIGrid::render() handles the pixel positioning
+        if (grid) grid->markDirty(); // #144 - Propagate to parent grid for texture caching
         return true;
     }
     else if (name == "sprite_scale") {
         sprite.setScale(sf::Vector2f(value, value));
+        if (grid) grid->markDirty(); // #144 - Content change
         return true;
     }
     return false;
@@ -691,6 +694,7 @@ bool UIEntity::setProperty(const std::string& name, float value) {
 bool UIEntity::setProperty(const std::string& name, int value) {
     if (name == "sprite_index" || name == "sprite_number") {
         sprite.setSpriteIndex(value);
+        if (grid) grid->markDirty(); // #144 - Content change
         return true;
     }
     return false;
@@ -2725,54 +2725,66 @@ bool UIGrid::setProperty(const std::string& name, float value) {
         position.x = value;
         box.setPosition(position);
         output.setPosition(position);
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     else if (name == "y") {
         position.y = value;
         box.setPosition(position);
         output.setPosition(position);
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     else if (name == "w" || name == "width") {
         box.setSize(sf::Vector2f(value, box.getSize().y));
         output.setTextureRect(sf::IntRect(0, 0, box.getSize().x, box.getSize().y));
+        markDirty(); // #144 - Size change
         return true;
     }
     else if (name == "h" || name == "height") {
         box.setSize(sf::Vector2f(box.getSize().x, value));
         output.setTextureRect(sf::IntRect(0, 0, box.getSize().x, box.getSize().y));
+        markDirty(); // #144 - Size change
         return true;
     }
     else if (name == "center_x") {
         center_x = value;
+        markDirty(); // #144 - View change affects content
         return true;
     }
     else if (name == "center_y") {
         center_y = value;
+        markDirty(); // #144 - View change affects content
         return true;
     }
     else if (name == "zoom") {
         zoom = value;
+        markDirty(); // #144 - View change affects content
         return true;
     }
     else if (name == "z_index") {
         z_index = static_cast<int>(value);
+        markDirty(); // #144 - Z-order change affects parent
         return true;
     }
     else if (name == "fill_color.r") {
         fill_color.r = static_cast<uint8_t>(std::max(0.0f, std::min(255.0f, value)));
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "fill_color.g") {
         fill_color.g = static_cast<uint8_t>(std::max(0.0f, std::min(255.0f, value)));
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "fill_color.b") {
         fill_color.b = static_cast<uint8_t>(std::max(0.0f, std::min(255.0f, value)));
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "fill_color.a") {
         fill_color.a = static_cast<uint8_t>(std::max(0.0f, std::min(255.0f, value)));
+        markDirty(); // #144 - Content change
         return true;
     }
     return false;
@@ -2783,16 +2795,19 @@ bool UIGrid::setProperty(const std::string& name, const sf::Vector2f& value) {
         position = value;
         box.setPosition(position);
         output.setPosition(position);
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     else if (name == "size") {
         box.setSize(value);
         output.setTextureRect(sf::IntRect(0, 0, box.getSize().x, box.getSize().y));
+        markDirty(); // #144 - Size change
         return true;
     }
     else if (name == "center") {
         center_x = value.x;
         center_y = value.y;
+        markDirty(); // #144 - View change affects content
         return true;
     }
     return false;
@@ -207,36 +207,43 @@ bool UILine::setProperty(const std::string& name, float value) {
     if (name == "thickness") {
         thickness = value;
         vertices_dirty = true;
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "x") {
         float dx = value - position.x;
         move(dx, 0);
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     else if (name == "y") {
         float dy = value - position.y;
         move(0, dy);
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     else if (name == "start_x") {
         start_pos.x = value;
         vertices_dirty = true;
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "start_y") {
         start_pos.y = value;
         vertices_dirty = true;
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "end_x") {
         end_pos.x = value;
         vertices_dirty = true;
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "end_y") {
         end_pos.y = value;
         vertices_dirty = true;
+        markDirty(); // #144 - Content change
         return true;
     }
     return false;
@@ -246,6 +253,7 @@ bool UILine::setProperty(const std::string& name, const sf::Color& value) {
     if (name == "color") {
         color = value;
         vertices_dirty = true;
+        markDirty(); // #144 - Content change
         return true;
     }
     return false;
@@ -255,11 +263,13 @@ bool UILine::setProperty(const std::string& name, const sf::Vector2f& value) {
     if (name == "start") {
         start_pos = value;
         vertices_dirty = true;
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "end") {
         end_pos = value;
         vertices_dirty = true;
+        markDirty(); // #144 - Content change
         return true;
     }
     return false;
@@ -499,27 +499,33 @@ bool UISprite::setProperty(const std::string& name, float value) {
     if (name == "x") {
         position.x = value;
         sprite.setPosition(position); // Keep sprite in sync
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     else if (name == "y") {
         position.y = value;
         sprite.setPosition(position); // Keep sprite in sync
+        markDirty(); // #144 - Propagate to parent for texture caching
         return true;
     }
     else if (name == "scale") {
         sprite.setScale(sf::Vector2f(value, value));
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "scale_x") {
         sprite.setScale(sf::Vector2f(value, sprite.getScale().y));
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "scale_y") {
         sprite.setScale(sf::Vector2f(sprite.getScale().x, value));
+        markDirty(); // #144 - Content change
         return true;
     }
     else if (name == "z_index") {
         z_index = static_cast<int>(value);
+        markDirty(); // #144 - Z-order change affects parent
         return true;
     }
     return false;
@@ -528,10 +534,12 @@ bool UISprite::setProperty(const std::string& name, float value) {
 bool UISprite::setProperty(const std::string& name, int value) {
     if (name == "sprite_index" || name == "sprite_number") {
         setSpriteIndex(value);
+        markDirty(); // #144 - Content change
         return true;
     }
    else if (name == "z_index") {
         z_index = value;
+        markDirty(); // #144 - Z-order change affects parent
         return true;
     }
     return false;
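Taken together, the setProperty() changes above mean that any property write or animation reaching one of these drawables now calls markDirty() (or the parent grid's markDirty()), which is what invalidates the cached texture tracked under #144. From Python this happens implicitly whenever a property is set or animated. A sketch using the constructors exercised by the suite below; the attribute assignments assume the Python bindings expose the same property names handled in setProperty():

    import mcrfpy

    mcrfpy.createScene("dirty_demo")
    ui = mcrfpy.sceneUI("dirty_demo")

    cap = mcrfpy.Caption(text="hello", pos=(10, 10))
    ui.append(cap)
    mcrfpy.setScene("dirty_demo")

    cap.text = "hello, cache"                   # string property  -> markDirty() (content change)
    cap.fill_color = mcrfpy.Color(255, 255, 0)  # color property   -> markDirty() (content change)

    # Animating a float property dirties the caption every frame,
    # the worst case the "animated" scenario below is designed to measure.
    anim = mcrfpy.Animation("fill_color.r", 128.0, 2.0, "linear")
    anim.start(cap)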
@ -0,0 +1,359 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Comprehensive Performance Benchmark Suite for McRogueFace (#104, #144)
|
||||||
|
|
||||||
|
Runs 6 benchmark scenarios to establish baseline performance metrics:
|
||||||
|
1. Empty scene - Pure engine overhead
|
||||||
|
2. Static UI - 100 frames, no animation (best case for caching)
|
||||||
|
3. Animated UI - 100 frames, all animating (worst case for caching)
|
||||||
|
4. Mixed UI - 100 frames, 10 animating (realistic case)
|
||||||
|
5. Deep hierarchy - 5 levels of nesting (propagation cost)
|
||||||
|
6. Grid stress - Large grid with entities (known bottleneck)
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
./mcrogueface --headless --exec tests/benchmarks/benchmark_suite.py
|
||||||
|
|
||||||
|
Results are printed to stdout in a format suitable for tracking over time.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import mcrfpy
|
||||||
|
import sys
|
||||||
|
import random
|
||||||
|
|
||||||
|
# Benchmark configuration
|
||||||
|
WARMUP_FRAMES = 30 # Frames to skip before measuring
|
||||||
|
MEASURE_FRAMES = 120 # Frames to measure (2 seconds at 60fps)
|
||||||
|
FRAME_BUDGET_MS = 16.67 # Target: 60 FPS
|
||||||
|
|
||||||
|
# Storage for results
|
||||||
|
results = {}
|
||||||
|
current_scenario = None
|
||||||
|
frame_count = 0
|
||||||
|
metrics_samples = []
|
||||||
|
|
||||||
|
|
||||||
|
def collect_metrics(runtime):
|
||||||
|
"""Timer callback to collect metrics each frame."""
|
||||||
|
global frame_count, metrics_samples
|
||||||
|
|
||||||
|
frame_count += 1
|
||||||
|
|
||||||
|
# Skip warmup frames
|
||||||
|
if frame_count <= WARMUP_FRAMES:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Collect sample
|
||||||
|
m = mcrfpy.getMetrics()
|
||||||
|
metrics_samples.append({
|
||||||
|
'frame_time': m['frame_time'],
|
||||||
|
'avg_frame_time': m['avg_frame_time'],
|
||||||
|
'fps': m['fps'],
|
||||||
|
'draw_calls': m['draw_calls'],
|
||||||
|
'ui_elements': m['ui_elements'],
|
||||||
|
'visible_elements': m['visible_elements'],
|
||||||
|
'grid_render_time': m['grid_render_time'],
|
||||||
|
'entity_render_time': m['entity_render_time'],
|
||||||
|
'python_time': m['python_time'],
|
||||||
|
'animation_time': m['animation_time'],
|
||||||
|
'grid_cells_rendered': m['grid_cells_rendered'],
|
||||||
|
'entities_rendered': m['entities_rendered'],
|
||||||
|
})
|
||||||
|
|
||||||
|
# Check if we have enough samples
|
||||||
|
if len(metrics_samples) >= MEASURE_FRAMES:
|
||||||
|
finish_scenario()
|
||||||
|
|
||||||
|
|
||||||
|
def finish_scenario():
|
||||||
|
"""Calculate statistics and store results for current scenario."""
|
||||||
|
global results, current_scenario, metrics_samples
|
||||||
|
|
||||||
|
mcrfpy.delTimer("benchmark_collect")
|
||||||
|
|
||||||
|
if not metrics_samples:
|
||||||
|
print(f" WARNING: No samples collected for {current_scenario}")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Calculate averages
|
||||||
|
n = len(metrics_samples)
|
||||||
|
avg = lambda key: sum(s[key] for s in metrics_samples) / n
|
||||||
|
|
||||||
|
results[current_scenario] = {
|
||||||
|
'samples': n,
|
||||||
|
'avg_frame_time': avg('frame_time'),
|
||||||
|
'avg_fps': avg('fps'),
|
||||||
|
'avg_draw_calls': avg('draw_calls'),
|
||||||
|
'avg_ui_elements': avg('ui_elements'),
|
||||||
|
'avg_grid_render_time': avg('grid_render_time'),
|
||||||
|
'avg_entity_render_time': avg('entity_render_time'),
|
||||||
|
'avg_python_time': avg('python_time'),
|
||||||
|
'avg_animation_time': avg('animation_time'),
|
||||||
|
'avg_grid_cells': avg('grid_cells_rendered'),
|
||||||
|
'avg_entities': avg('entities_rendered'),
|
||||||
|
'max_frame_time': max(s['frame_time'] for s in metrics_samples),
|
||||||
|
'min_frame_time': min(s['frame_time'] for s in metrics_samples),
|
||||||
|
}
|
||||||
|
|
||||||
|
# Calculate percentage breakdown
|
||||||
|
r = results[current_scenario]
|
||||||
|
total = r['avg_frame_time']
|
||||||
|
if total > 0:
|
||||||
|
r['pct_grid'] = (r['avg_grid_render_time'] / total) * 100
|
||||||
|
r['pct_entity'] = (r['avg_entity_render_time'] / total) * 100
|
||||||
|
r['pct_python'] = (r['avg_python_time'] / total) * 100
|
||||||
|
r['pct_animation'] = (r['avg_animation_time'] / total) * 100
|
||||||
|
r['pct_other'] = 100 - r['pct_grid'] - r['pct_entity'] - r['pct_python'] - r['pct_animation']
|
||||||
|
|
||||||
|
print(f" Completed: {n} samples, avg {r['avg_frame_time']:.2f}ms ({r['avg_fps']:.0f} FPS)")
|
||||||
|
|
||||||
|
# Run next scenario
|
||||||
|
run_next_scenario()
|
||||||
|
|
||||||
|
|
||||||
|
def run_next_scenario():
    """Run the next benchmark scenario in sequence."""
    global current_scenario, frame_count, metrics_samples

    scenarios = [
        ('1_empty', setup_empty_scene),
        ('2_static_100', setup_static_100),
        ('3_animated_100', setup_animated_100),
        ('4_mixed_100', setup_mixed_100),
        ('5_deep_hierarchy', setup_deep_hierarchy),
        ('6_grid_stress', setup_grid_stress),
    ]

    # Find current index
    current_idx = -1
    if current_scenario:
        for i, (name, _) in enumerate(scenarios):
            if name == current_scenario:
                current_idx = i
                break

    # Move to next
    next_idx = current_idx + 1

    if next_idx >= len(scenarios):
        # All done
        print_results()
        return

    # Setup next scenario
    current_scenario = scenarios[next_idx][0]
    frame_count = 0
    metrics_samples = []

    print(f"\n[{next_idx + 1}/{len(scenarios)}] Running: {current_scenario}")

    # Run setup function
    scenarios[next_idx][1]()

    # Start collection timer (runs every frame)
    mcrfpy.setTimer("benchmark_collect", collect_metrics, 1)


# ============================================================================
# Scenario Setup Functions
# ============================================================================

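# Each setup_* function builds its own scene, populates it, and switches to it.
# Metric collection begins when run_next_scenario() registers the per-frame
# "benchmark_collect" timer after the setup function returns.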
def setup_empty_scene():
    """Scenario 1: Empty scene - pure engine overhead."""
    mcrfpy.createScene("bench_empty")
    mcrfpy.setScene("bench_empty")


def setup_static_100():
    """Scenario 2: 100 static frames - best case for caching."""
    mcrfpy.createScene("bench_static")
    ui = mcrfpy.sceneUI("bench_static")

    # Create 100 frames in a 10x10 grid
    for i in range(100):
        x = (i % 10) * 100 + 12
        y = (i // 10) * 70 + 12
        frame = mcrfpy.Frame(pos=(x, y), size=(80, 55))
        frame.fill_color = mcrfpy.Color(50 + i, 100, 150)
        frame.outline = 2
        frame.outline_color = mcrfpy.Color(255, 255, 255)

        # Add a caption child
        cap = mcrfpy.Caption(text=f"F{i}", pos=(5, 5))
        cap.fill_color = mcrfpy.Color(255, 255, 255)
        frame.children.append(cap)

        ui.append(frame)

    mcrfpy.setScene("bench_static")


def setup_animated_100():
    """Scenario 3: 100 frames all animating - worst case for caching."""
    mcrfpy.createScene("bench_animated")
    ui = mcrfpy.sceneUI("bench_animated")

    frames = []
    for i in range(100):
        x = (i % 10) * 100 + 12
        y = (i // 10) * 70 + 12
        frame = mcrfpy.Frame(pos=(x, y), size=(80, 55))
        frame.fill_color = mcrfpy.Color(50 + i, 100, 150)
        frames.append(frame)
        ui.append(frame)

    mcrfpy.setScene("bench_animated")

    # Start animations on all frames (color animation = content change)
    for i, frame in enumerate(frames):
        # Animate fill color - this dirties the frame
        target_r = (i * 17) % 256
        anim = mcrfpy.Animation("fill_color.r", float(target_r), 2.0, "linear")
        anim.start(frame)


def setup_mixed_100():
    """Scenario 4: 100 frames, only 10 animating - realistic case."""
    mcrfpy.createScene("bench_mixed")
    ui = mcrfpy.sceneUI("bench_mixed")

    frames = []
    for i in range(100):
        x = (i % 10) * 100 + 12
        y = (i // 10) * 70 + 12
        frame = mcrfpy.Frame(pos=(x, y), size=(80, 55))
        frame.fill_color = mcrfpy.Color(50 + i, 100, 150)
        frames.append(frame)
        ui.append(frame)

    mcrfpy.setScene("bench_mixed")

    # Animate only 10 frames (every 10th)
    for i in range(0, 100, 10):
        frame = frames[i]
        anim = mcrfpy.Animation("fill_color.r", 255.0, 2.0, "easeInOut")
        anim.start(frame)


def setup_deep_hierarchy():
    """Scenario 5: 5 levels of nesting - test dirty flag propagation cost."""
    mcrfpy.createScene("bench_deep")
    ui = mcrfpy.sceneUI("bench_deep")

    # Create 10 trees, each with 5 levels of nesting
    deepest_frames = []

    for tree in range(10):
        x_offset = tree * 100 + 12
        current_parent = None

        for level in range(5):
            frame = mcrfpy.Frame(
                pos=(10, 10) if level > 0 else (x_offset, 100),
                size=(80 - level * 10, 500 - level * 80)
            )
            frame.fill_color = mcrfpy.Color(50 + level * 40, 100, 200 - level * 30)
            frame.outline = 1

            if current_parent is None:
                ui.append(frame)
            else:
                current_parent.children.append(frame)

            current_parent = frame

            if level == 4:  # Deepest level
                deepest_frames.append(frame)

    mcrfpy.setScene("bench_deep")

    # Animate the deepest frames - tests propagation up the hierarchy
    for frame in deepest_frames:
        anim = mcrfpy.Animation("fill_color.g", 255.0, 2.0, "linear")
        anim.start(frame)


def setup_grid_stress():
    """Scenario 6: Large grid with entities - known performance bottleneck."""
    mcrfpy.createScene("bench_grid")
    ui = mcrfpy.sceneUI("bench_grid")

    # Create a 50x50 grid (2500 cells)
    grid = mcrfpy.Grid(grid_size=(50, 50), pos=(50, 50), size=(700, 700))
    grid.zoom = 0.75
    grid.center = (400, 400)  # Center view
    ui.append(grid)

    # Fill with alternating colors
    for y in range(50):
        for x in range(50):
            cell = grid.at(x, y)
            if (x + y) % 2 == 0:
                cell.color = mcrfpy.Color(60, 60, 80)
            else:
                cell.color = mcrfpy.Color(40, 40, 60)

    # Add 50 entities
    try:
        texture = mcrfpy.Texture("assets/kenney_tinydungeon.png", 16, 16)

        for i in range(50):
            # Entity takes positional args: (position, texture, sprite_index, grid)
            pos = mcrfpy.Vector(random.randint(5, 45), random.randint(5, 45))
            entity = mcrfpy.Entity(pos, texture, random.randint(0, 100), grid)
            grid.entities.append(entity)
    except Exception as e:
        print(f" Note: Could not create entities: {e}")

    mcrfpy.setScene("bench_grid")


# ============================================================================
# Results Output
# ============================================================================

def print_results():
    """Print final benchmark results."""
    print("\n" + "=" * 70)
    print("BENCHMARK RESULTS")
    print("=" * 70)

    print(f"\n{'Scenario':<20} {'Avg FPS':>8} {'Avg ms':>8} {'Max ms':>8} {'Draw Calls':>10}")
    print("-" * 70)

    for name, r in results.items():
        print(f"{name:<20} {r['avg_fps']:>8.1f} {r['avg_frame_time']:>8.2f} {r['max_frame_time']:>8.2f} {r['avg_draw_calls']:>10.0f}")

    print("\n" + "-" * 70)
    print("TIMING BREAKDOWN (% of frame time)")
    print("-" * 70)
    print(f"{'Scenario':<20} {'Grid':>8} {'Entity':>8} {'Python':>8} {'Anim':>8} {'Other':>8}")
    print("-" * 70)

    for name, r in results.items():
        if 'pct_grid' in r:
            print(f"{name:<20} {r['pct_grid']:>7.1f}% {r['pct_entity']:>7.1f}% {r['pct_python']:>7.1f}% {r['pct_animation']:>7.1f}% {r['pct_other']:>7.1f}%")

    print("\n" + "=" * 70)

    # Performance assessment
    print("\nPERFORMANCE ASSESSMENT:")
    for name, r in results.items():
        status = "PASS" if r['avg_frame_time'] < FRAME_BUDGET_MS else "OVER BUDGET"
        print(f" {name}: {status} ({r['avg_frame_time']:.2f}ms vs {FRAME_BUDGET_MS:.2f}ms budget)")

    print("\nBenchmark complete.")
    sys.exit(0)


# ============================================================================
# Main Entry Point
# ============================================================================

if __name__ == "__main__":
    print("=" * 70)
    print("McRogueFace Performance Benchmark Suite")
    print("=" * 70)
    print(f"Configuration: {WARMUP_FRAMES} warmup frames, {MEASURE_FRAMES} measurement frames")
    print(f"Target: {FRAME_BUDGET_MS:.2f}ms per frame (60 FPS)")

    # Start the benchmark sequence
    run_next_scenario()

@ -0,0 +1,135 @@
#!/usr/bin/env python3
"""Test benchmark logging functionality (Issue #104)"""
import mcrfpy
import sys
import os
import json

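# Timer callback: fires ~100ms after startup, stops the capture, and validates
# the JSON file produced by mcrfpy.end_benchmark().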
def run_test(runtime):
    """Timer callback to test benchmark logging"""
    # Stop the benchmark and get filename
    try:
        filename = mcrfpy.end_benchmark()
        print(f"Benchmark written to: {filename}")

        # Check file exists
        if not os.path.exists(filename):
            print(f"FAIL: Benchmark file not found: {filename}")
            sys.exit(1)

        # Parse and validate JSON
        with open(filename, 'r') as f:
            data = json.load(f)

        # Validate structure
        if 'benchmark' not in data:
            print("FAIL: Missing 'benchmark' key")
            sys.exit(1)

        if 'frames' not in data:
            print("FAIL: Missing 'frames' key")
            sys.exit(1)

        # Check benchmark metadata
        bench = data['benchmark']
        if 'pid' not in bench:
            print("FAIL: Missing 'pid' in benchmark")
            sys.exit(1)
        if 'start_time' not in bench:
            print("FAIL: Missing 'start_time' in benchmark")
            sys.exit(1)
        if 'end_time' not in bench:
            print("FAIL: Missing 'end_time' in benchmark")
            sys.exit(1)
        if 'total_frames' not in bench:
            print("FAIL: Missing 'total_frames' in benchmark")
            sys.exit(1)

        print(f" PID: {bench['pid']}")
        print(f" Duration: {bench['duration_seconds']:.3f}s")
        print(f" Frames: {bench['total_frames']}")

        # Check we have frames
        if len(data['frames']) == 0:
            print("FAIL: No frames recorded")
            sys.exit(1)

        # Check frame structure
        frame = data['frames'][0]
        required_fields = ['frame_number', 'timestamp_ms', 'frame_time_ms', 'fps',
                           'work_time_ms', 'grid_render_ms', 'entity_render_ms',
                           'python_time_ms', 'draw_calls', 'ui_elements', 'logs']
        for field in required_fields:
            if field not in frame:
                print(f"FAIL: Missing field '{field}' in frame")
                sys.exit(1)

        # Check log message was captured
        found_log = False
        for frame in data['frames']:
            if 'Test log message' in frame.get('logs', []):
                found_log = True
                break

        if not found_log:
            print("FAIL: Log message not found in any frame")
            sys.exit(1)

        # Show timing breakdown
        f0 = data['frames'][0]
        print(f" First frame FPS: {f0['fps']}")
        print(f" Frame time: {f0['frame_time_ms']:.3f}ms, Work time: {f0['work_time_ms']:.3f}ms")
        if f0['frame_time_ms'] > 0:
            load_pct = (f0['work_time_ms'] / f0['frame_time_ms']) * 100
            print(f" Load: {load_pct:.1f}% (sleep time: {f0['frame_time_ms'] - f0['work_time_ms']:.3f}ms)")
        print(f" Log messages captured: Yes")

        # Clean up
        os.remove(filename)
        print(f" Cleaned up: {filename}")

        print("PASS")
        sys.exit(0)

    except Exception as e:
        print(f"FAIL: {e}")
        sys.exit(1)

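# Module-level checks below exercise the error paths before any capture is
# active: end/log without start must raise RuntimeError, and a second
# start_benchmark() while capturing must also raise.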
# Test error handling - calling end without start
try:
    mcrfpy.end_benchmark()
    print("FAIL: end_benchmark() should have raised RuntimeError")
    sys.exit(1)
except RuntimeError as e:
    print(f"Correct error on end without start: {e}")

# Test error handling - logging without start
try:
    mcrfpy.log_benchmark("test")
    print("FAIL: log_benchmark() should have raised RuntimeError")
    sys.exit(1)
except RuntimeError as e:
    print(f"Correct error on log without start: {e}")

# Start the benchmark
mcrfpy.start_benchmark()
print("Benchmark started")

# Test error handling - double start
try:
    mcrfpy.start_benchmark()
    print("FAIL: double start_benchmark() should have raised RuntimeError")
    sys.exit(1)
except RuntimeError as e:
    print(f"Correct error on double start: {e}")

# Log a test message
mcrfpy.log_benchmark("Test log message")
print("Logged test message")

# Set up scene and run for a few frames
mcrfpy.createScene("test")
mcrfpy.setScene("test")

# Schedule test completion after ~100ms (to capture some frames)
mcrfpy.setTimer("test", run_test, 100)