feat: Add ColorLayer perspective methods and patrol demo (addresses #113)

ColorLayer enhancements:
- fill_rect(x, y, w, h, color): Fill rectangular region
- draw_fov(source, radius, fov, visible, discovered, unknown): One-time FOV draw
- apply_perspective(entity, visible, discovered, unknown): Bind layer to entity
- update_perspective(): Refresh layer from bound entity's gridstate
- clear_perspective(): Remove entity binding (usage sketch below)
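
Minimal usage sketch of the binding API (a sketch only; `grid`, `player`, and `fov` are
illustrative names, and color arguments accept Color objects or (r, g, b[, a]) tuples):

    fov = grid.add_layer('color', z_index=-1)
    fov.fill((0, 0, 0, 255))               # start fully unknown
    fov.apply_perspective(player,
                          visible=(0, 0, 0, 0),
                          discovered=(20, 20, 40, 180),
                          unknown=(0, 0, 0, 255))
    # after the entity moves:
    player.update_visibility()             # recompute the entity's gridstate
    fov.update_perspective()               # redraw the layer from the entity's position
    # when no longer needed:
    fov.clear_perspective()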

New demo: tests/demo/perspective_patrol_demo.py
- Entity patrols around 10x10 central obstacle
- FOV layer shows visible/discovered/unknown states
- [R] to reset vision, [Space] to pause, [Q] to quit
- Demonstrates fog of war memory system

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
John McCardle 2025-12-01 16:26:30 -05:00
parent c5b4200dea
commit a529e5eac3
3 changed files with 407 additions and 1 deletion


@@ -1,5 +1,6 @@
#include "GridLayers.h"
#include "UIGrid.h"
#include "UIEntity.h"
#include "PyColor.h"
#include "PyTexture.h"
#include "PyFOV.h"
@@ -110,7 +111,11 @@ void GridLayer::ensureChunkTexture(int chunk_idx, int cell_width, int cell_height
ColorLayer::ColorLayer(int z_index, int grid_x, int grid_y, UIGrid* parent)
: GridLayer(GridLayerType::Color, z_index, grid_x, grid_y, parent),
colors(grid_x * grid_y, sf::Color::Transparent),
perspective_visible(255, 255, 200, 64),
perspective_discovered(100, 100, 100, 128),
perspective_unknown(0, 0, 0, 255),
has_perspective(false)
{}
sf::Color& ColorLayer::at(int x, int y) {
@@ -195,6 +200,48 @@ void ColorLayer::drawFOV(int source_x, int source_y, int radius,
markDirty();
}
void ColorLayer::applyPerspective(std::shared_ptr<UIEntity> entity,
const sf::Color& visible,
const sf::Color& discovered,
const sf::Color& unknown) {
perspective_entity = entity;
perspective_visible = visible;
perspective_discovered = discovered;
perspective_unknown = unknown;
has_perspective = true;
// Initial draw based on entity's current position
updatePerspective();
}
void ColorLayer::updatePerspective() {
if (!has_perspective) return;
auto entity = perspective_entity.lock();
if (!entity) {
// Entity was deleted, clear perspective
has_perspective = false;
return;
}
if (!parent_grid) return;
// Get entity position and grid's FOV settings
int source_x = static_cast<int>(entity->position.x);
int source_y = static_cast<int>(entity->position.y);
int radius = parent_grid->fov_radius;
TCOD_fov_algorithm_t algorithm = parent_grid->fov_algorithm;
// Use drawFOV with our stored colors
drawFOV(source_x, source_y, radius, algorithm,
perspective_visible, perspective_discovered, perspective_unknown);
}
void ColorLayer::clearPerspective() {
perspective_entity.reset();
has_perspective = false;
}
void ColorLayer::resize(int new_grid_x, int new_grid_y) {
std::vector<sf::Color> new_colors(new_grid_x * new_grid_y, sf::Color::Transparent);
@@ -539,6 +586,22 @@ PyMethodDef PyGridLayerAPI::ColorLayer_methods[] = {
" discovered (Color): Color for previously seen cells\n"
" unknown (Color): Color for never-seen cells\n\n"
"Note: Layer must be attached to a grid for FOV calculation."},
{"apply_perspective", (PyCFunction)PyGridLayerAPI::ColorLayer_apply_perspective, METH_VARARGS | METH_KEYWORDS,
"apply_perspective(entity, visible=None, discovered=None, unknown=None)\n\n"
"Bind this layer to an entity for automatic FOV updates.\n\n"
"Args:\n"
" entity (Entity): The entity whose perspective to track\n"
" visible (Color): Color for currently visible cells\n"
" discovered (Color): Color for previously seen cells\n"
" unknown (Color): Color for never-seen cells\n\n"
"After binding, call update_perspective() when the entity moves."},
{"update_perspective", (PyCFunction)PyGridLayerAPI::ColorLayer_update_perspective, METH_NOARGS,
"update_perspective()\n\n"
"Redraw FOV based on the bound entity's current position.\n\n"
"Call this after the entity moves to update the visibility layer."},
{"clear_perspective", (PyCFunction)PyGridLayerAPI::ColorLayer_clear_perspective, METH_NOARGS,
"clear_perspective()\n\n"
"Remove the perspective binding from this layer."},
{NULL}
};
@@ -865,6 +928,122 @@ PyObject* PyGridLayerAPI::ColorLayer_draw_fov(PyColorLayerObject* self, PyObject
Py_RETURN_NONE;
}
PyObject* PyGridLayerAPI::ColorLayer_apply_perspective(PyColorLayerObject* self, PyObject* args, PyObject* kwds) {
static const char* kwlist[] = {"entity", "visible", "discovered", "unknown", NULL};
PyObject* entity_obj;
PyObject* visible_obj = nullptr;
PyObject* discovered_obj = nullptr;
PyObject* unknown_obj = nullptr;
if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|OOO", const_cast<char**>(kwlist),
&entity_obj, &visible_obj, &discovered_obj, &unknown_obj)) {
return NULL;
}
if (!self->data) {
PyErr_SetString(PyExc_RuntimeError, "Layer has no data");
return NULL;
}
if (!self->grid) {
PyErr_SetString(PyExc_RuntimeError, "Layer is not attached to a grid");
return NULL;
}
// Get the Entity type
auto* mcrfpy_module = PyImport_ImportModule("mcrfpy");
if (!mcrfpy_module) return NULL;
auto* entity_type = PyObject_GetAttrString(mcrfpy_module, "Entity");
Py_DECREF(mcrfpy_module);
if (!entity_type) return NULL;
if (!PyObject_IsInstance(entity_obj, entity_type)) {
Py_DECREF(entity_type);
PyErr_SetString(PyExc_TypeError, "entity must be an Entity object");
return NULL;
}
Py_DECREF(entity_type);
// Get the shared_ptr to the entity
PyUIEntityObject* py_entity = (PyUIEntityObject*)entity_obj;
if (!py_entity->data) {
PyErr_SetString(PyExc_RuntimeError, "Entity has no data");
return NULL;
}
// Helper lambda to parse color
auto parse_color = [](PyObject* obj, sf::Color& out, const sf::Color& default_val, const char* name) -> bool {
if (!obj || obj == Py_None) {
out = default_val;
return true;
}
auto* mcrfpy_module = PyImport_ImportModule("mcrfpy");
if (!mcrfpy_module) return false;
auto* color_type = PyObject_GetAttrString(mcrfpy_module, "Color");
Py_DECREF(mcrfpy_module);
if (!color_type) return false;
if (PyObject_IsInstance(obj, color_type)) {
out = ((PyColorObject*)obj)->data;
Py_DECREF(color_type);
return true;
} else if (PyTuple_Check(obj)) {
int r, g, b, a = 255;
if (!PyArg_ParseTuple(obj, "iii|i", &r, &g, &b, &a)) {
Py_DECREF(color_type);
return false;
}
out = sf::Color(r, g, b, a);
Py_DECREF(color_type);
return true;
}
Py_DECREF(color_type);
PyErr_Format(PyExc_TypeError, "%s must be a Color object or (r, g, b[, a]) tuple", name);
return false;
};
// Parse colors with defaults
sf::Color visible_color(255, 255, 200, 64);
sf::Color discovered_color(100, 100, 100, 128);
sf::Color unknown_color(0, 0, 0, 255);
if (!parse_color(visible_obj, visible_color, visible_color, "visible")) return NULL;
if (!parse_color(discovered_obj, discovered_color, discovered_color, "discovered")) return NULL;
if (!parse_color(unknown_obj, unknown_color, unknown_color, "unknown")) return NULL;
self->data->applyPerspective(py_entity->data, visible_color, discovered_color, unknown_color);
Py_RETURN_NONE;
}
PyObject* PyGridLayerAPI::ColorLayer_update_perspective(PyColorLayerObject* self, PyObject* args) {
if (!self->data) {
PyErr_SetString(PyExc_RuntimeError, "Layer has no data");
return NULL;
}
if (!self->data->has_perspective) {
PyErr_SetString(PyExc_RuntimeError, "Layer has no perspective binding. Call apply_perspective() first.");
return NULL;
}
self->data->updatePerspective();
Py_RETURN_NONE;
}
PyObject* PyGridLayerAPI::ColorLayer_clear_perspective(PyColorLayerObject* self, PyObject* args) {
if (!self->data) {
PyErr_SetString(PyExc_RuntimeError, "Layer has no data");
return NULL;
}
self->data->clearPerspective();
Py_RETURN_NONE;
}
PyObject* PyGridLayerAPI::ColorLayer_get_z_index(PyColorLayerObject* self, void* closure) {
if (!self->data) {
PyErr_SetString(PyExc_RuntimeError, "Layer has no data");


@@ -11,6 +11,7 @@
// Forward declarations
class UIGrid;
class PyTexture;
class UIEntity;
// Include PyTexture.h for PyTextureObject (typedef, not struct)
#include "PyTexture.h"
@@ -89,6 +90,13 @@ class ColorLayer : public GridLayer {
public:
std::vector<sf::Color> colors;
// Perspective binding (#113) - binds layer to entity for automatic FOV updates
std::weak_ptr<UIEntity> perspective_entity;
sf::Color perspective_visible;
sf::Color perspective_discovered;
sf::Color perspective_unknown;
bool has_perspective;
ColorLayer(int z_index, int grid_x, int grid_y, UIGrid* parent);
// Access color at position
@@ -109,6 +117,18 @@ public:
const sf::Color& discovered,
const sf::Color& unknown);
// Perspective binding (#113) - bind layer to entity for automatic updates
void applyPerspective(std::shared_ptr<UIEntity> entity,
const sf::Color& visible,
const sf::Color& discovered,
const sf::Color& unknown);
// Update perspective - redraws based on bound entity's current position
void updatePerspective();
// Clear perspective binding
void clearPerspective();
// Render a specific chunk to its texture (called when chunk is dirty AND visible)
void renderChunkToTexture(int chunk_x, int chunk_y, int cell_width, int cell_height) override;
@@ -185,6 +205,9 @@ public:
static PyObject* ColorLayer_fill(PyColorLayerObject* self, PyObject* args);
static PyObject* ColorLayer_fill_rect(PyColorLayerObject* self, PyObject* args, PyObject* kwds);
static PyObject* ColorLayer_draw_fov(PyColorLayerObject* self, PyObject* args, PyObject* kwds);
static PyObject* ColorLayer_apply_perspective(PyColorLayerObject* self, PyObject* args, PyObject* kwds);
static PyObject* ColorLayer_update_perspective(PyColorLayerObject* self, PyObject* args);
static PyObject* ColorLayer_clear_perspective(PyColorLayerObject* self, PyObject* args);
static PyObject* ColorLayer_get_z_index(PyColorLayerObject* self, void* closure);
static int ColorLayer_set_z_index(PyColorLayerObject* self, PyObject* value, void* closure);
static PyObject* ColorLayer_get_visible(PyColorLayerObject* self, void* closure);


@@ -0,0 +1,204 @@
#!/usr/bin/env python3
"""
Perspective Patrol Demo
=======================
Demonstrates the FOV/perspective system with an animated patrolling entity.
Features:
- 20x20 grid with 10x10 opaque obstacle in center
- Entity patrols around the obstacle in a square pattern
- ColorLayer shows fog of war (visible/discovered/unknown)
- Press 'R' to reset vision (shows unknown vs discovered difference)
- Press 'Space' to pause/resume patrol
"""
import mcrfpy
# Patrol waypoints (clockwise around the center obstacle)
WAYPOINTS = [
(3, 3), # Top-left
(16, 3), # Top-right
(16, 16), # Bottom-right
(3, 16), # Bottom-left
]
# State
current_waypoint = 0
patrol_paused = False
move_timer_ms = 150 # Time between moves
# Global references
g_grid = None
g_patrol = None
g_fov_layer = None
def setup_scene():
"""Create the demo scene"""
global g_grid, g_patrol, g_fov_layer
mcrfpy.createScene("patrol_demo")
mcrfpy.setScene("patrol_demo")
ui = mcrfpy.sceneUI("patrol_demo")
# Title
title = mcrfpy.Caption(text="Perspective Patrol Demo", pos=(10, 10))
title.fill_color = mcrfpy.Color(255, 255, 255)
ui.append(title)
# Instructions
instructions = mcrfpy.Caption(text="[R] Reset vision [Space] Pause/Resume [Q] Quit", pos=(10, 35))
instructions.fill_color = mcrfpy.Color(180, 180, 180)
ui.append(instructions)
# Create grid (20x20, each cell 24px) - centered in 1024x768 window
grid_size_px = 480
grid = mcrfpy.Grid(
pos=((1024 - grid_size_px) // 2, (768 - grid_size_px) // 2),
size=(grid_size_px, grid_size_px),
grid_size=(20, 20),
texture=None
)
grid.center = (10*16, 10*16)
grid.fill_color = mcrfpy.Color(40, 40, 50) # Dark floor background
ui.append(grid)
# Set FOV settings
grid.fov = mcrfpy.FOV.SHADOW
grid.fov_radius = 8
# Initialize all cells as walkable/transparent (floor)
for y in range(20):
for x in range(20):
point = grid.at(x, y)
point.walkable = True
point.transparent = True
# Create 10x10 obstacle box in center (cells 5-14 in both dimensions)
for y in range(5, 15):
for x in range(5, 15):
point = grid.at(x, y)
point.walkable = False
point.transparent = False
# Create a color layer for the walls (so we can see them)
wall_layer = grid.add_layer('color', z_index=-2)
wall_layer.fill((40, 40, 50, 255)) # Match floor color
# Draw walls on the wall layer
for y in range(5, 15):
for x in range(5, 15):
wall_layer.set(x, y, mcrfpy.Color(100, 70, 50, 255)) # Brown walls
# Create FOV layer (above walls, below entities)
fov_layer = grid.add_layer('color', z_index=-1)
fov_layer.fill((0, 0, 0, 255)) # Start completely black (unknown)
# Create patrolling entity
patrol = mcrfpy.Entity(WAYPOINTS[0])
patrol.sprite_index = 64 # '@' character typically
grid.entities.append(patrol)
# Bind FOV layer to entity
fov_layer.apply_perspective(
entity=patrol,
visible=(0, 0, 0, 0), # Fully transparent when visible
discovered=(20, 20, 40, 180), # Dark blue-gray when discovered
unknown=(0, 0, 0, 255) # Black when never seen
)
# Initial visibility update
patrol.update_visibility()
# Store references for timer callbacks
g_grid = grid
g_patrol = patrol
g_fov_layer = fov_layer
# Status caption (below centered grid)
status = mcrfpy.Caption(text="Status: Patrolling", pos=(10, 720))
status.fill_color = mcrfpy.Color(100, 255, 100)
status.name = "status"
ui.append(status)
# Set up keyboard handler
mcrfpy.keypressScene(on_keypress)
# Start patrol timer
mcrfpy.setTimer("patrol", patrol_step, move_timer_ms)
def patrol_step(runtime):
"""Move entity one step toward current waypoint"""
global current_waypoint, patrol_paused
if patrol_paused:
return
# Get current position and target
px, py = int(g_patrol.x), int(g_patrol.y)
tx, ty = WAYPOINTS[current_waypoint]
# Calculate direction
dx = 0 if tx == px else (1 if tx > px else -1)
dy = 0 if ty == py else (1 if ty > py else -1)
# Move one step (prefer horizontal, then vertical)
if dx != 0:
g_patrol.x = px + dx
elif dy != 0:
g_patrol.y = py + dy
# Update visibility after move
g_patrol.update_visibility()
# Check if reached waypoint
if int(g_patrol.x) == tx and int(g_patrol.y) == ty:
current_waypoint = (current_waypoint + 1) % len(WAYPOINTS)
update_status(f"Reached waypoint, heading to {WAYPOINTS[current_waypoint]}")
def on_keypress(key, state):
"""Handle keyboard input"""
global patrol_paused
if state != "start":
return
if key == "R":
reset_vision()
elif key == "Space":
patrol_paused = not patrol_paused
if patrol_paused:
update_status("Status: PAUSED")
else:
update_status("Status: Patrolling")
elif key == "Q":
mcrfpy.setScene(None)
def reset_vision():
"""Reset entity's discovered state to demonstrate unknown vs discovered"""
global g_patrol, g_fov_layer
# Clear entity's gridstate (forget everything)
for state in g_patrol.gridstate:
state.visible = False
state.discovered = False
# Re-fill the layer with unknown color
g_fov_layer.fill((0, 0, 0, 255))
# Update visibility from current position (will mark current FOV as visible)
g_patrol.update_visibility()
update_status("Vision RESET - watch discovered vs unknown!")
def update_status(text):
"""Update status caption"""
ui = mcrfpy.sceneUI("patrol_demo")
for element in ui:
if hasattr(element, 'name') and element.name == "status":
element.text = text
break
# Run the demo
setup_scene()