External Reflexes¶
In Event-Driven Cognition, we used a lightweight detector to wake a heavy VLM – intelligence reacting to the world. In this recipe, we apply the same pattern to the navigation layer: the robot transitions from idle patrol to active person-following the moment a human appears in the camera feed.
This is an External Reflex – an event triggered by the environment (not an internal failure) that reconfigures the robot’s behavior at runtime.
The Strategy¶
Reflex (Vision Component): A lightweight detector runs on every frame, scanning for “person”.
Event (The Trigger): Fires when “person” first appears in the detection labels.
Response (Controller Reconfiguration): Two actions execute in sequence:
Switch the Controller’s algorithm to VisionRGBDFollower
Send a goal to the Controller’s ActionServer to begin tracking
Step 1: The Vision Detector¶
We use the Vision component from the intelligence layer with a small embedded classifier – fast enough to process every frame.
from agents.components import Vision
from agents.config import VisionConfig
from agents.ros import Topic

# Input stream: the RGBD camera feed; output: per-frame detection results.
image0 = Topic(name="/camera/rgbd", msg_type="RGBD")
detections_topic = Topic(name="detections", msg_type="Detections")

# Local embedded classifier keeps inference cheap enough to run on every frame.
detection_config = VisionConfig(threshold=0.5, enable_local_classifier=True)

vision = Vision(
    component_name="detection_component",
    config=detection_config,
    inputs=[image0],
    outputs=[detections_topic],
    trigger=image0,
)
Step 2: Define the Event¶
We use on_change=True so the event fires only when “person” first appears in the detection labels – not continuously while a person remains in frame.
from kompass.ros import Event

# Edge-triggered: fires once when "person" first shows up in the detection
# labels, rather than continuously while a person stays in frame.
event_person_detected = Event(
    on_change=True,
    event_condition=detections_topic.msg.labels.contains("person"),
)
Step 3: Define the Actions¶
When the event fires, two actions execute in sequence:
Switch algorithm – reconfigure the Controller from its current mode to VisionRGBDFollower
Trigger the ActionServer – send a goal specifying “person” as the tracking target
from kompass.actions import update_parameter, send_component_action_server_goal
from kompass_interfaces.action import TrackVisionTarget

# Action 1: flip the controller over to the vision-based follower algorithm.
switch_algorithm_action = update_parameter(
    component=controller,
    param_name="algorithm",
    new_value="VisionRGBDFollower",
)

# Action 2: ask the controller's action server to start tracking a "person".
tracking_goal = TrackVisionTarget.Goal()
tracking_goal.label = "person"

action_start_person_following = send_component_action_server_goal(
    component=controller,
    request_msg=tracking_goal,
)
Tip
Linking an Event to a list of Actions executes them in sequence. This lets you chain reconfiguration steps – switch algorithm first, then send the goal.
Step 4: Wire and Launch¶
# Wire the event to its action chain; actions execute in list order, so the
# algorithm switch completes before the tracking goal is sent.
person_following_sequence = [switch_algorithm_action, action_start_person_following]
events_action = {event_person_detected: person_following_sequence}
Complete Recipe¶
"""External-reflex recipe: an event-driven detector switches the navigation
controller into person-following the moment a person appears in the camera feed.

Pipeline: Vision detector -> "person" detection event -> two sequenced actions
(switch controller algorithm, then send a tracking goal to its action server).
"""
import numpy as np
from agents.components import Vision
from agents.config import VisionConfig
from agents.ros import Topic
from kompass.components import Controller, ControllerConfig, DriveManager, LocalMapper
from kompass.robot import (
    AngularCtrlLimits, LinearCtrlLimits, RobotGeometry, RobotType, RobotConfig,
)
from kompass.ros import Launcher, Event
from kompass.actions import update_parameter, send_component_action_server_goal
from kompass_interfaces.action import TrackVisionTarget

# --- Vision Detector ---
# RGBD camera in, detection labels out; the image topic triggers each run.
image0 = Topic(name="/camera/rgbd", msg_type="RGBD")
detections_topic = Topic(name="detections", msg_type="Detections")

# Local embedded classifier: fast enough to process every frame.
detection_config = VisionConfig(threshold=0.5, enable_local_classifier=True)
vision = Vision(
    inputs=[image0],
    outputs=[detections_topic],
    trigger=image0,
    config=detection_config,
    component_name="detection_component",
)

# --- Robot Configuration ---
# Ackermann-drive robot modeled as a cylinder (radius 0.1 m, height 0.3 m)
# with explicit linear/angular velocity, acceleration, and steering limits.
my_robot = RobotConfig(
    model_type=RobotType.ACKERMANN,
    geometry_type=RobotGeometry.Type.CYLINDER,
    geometry_params=np.array([0.1, 0.3]),
    ctrl_vx_limits=LinearCtrlLimits(max_vel=1.0, max_acc=3.0, max_decel=2.5),
    ctrl_omega_limits=AngularCtrlLimits(
        max_vel=4.0, max_acc=6.0, max_decel=10.0, max_steer=np.pi / 3
    ),
)

# --- Navigation Components ---
depth_cam_info_topic = Topic(
    name="/camera/aligned_depth_to_color/camera_info", msg_type="CameraInfo"
)

config = ControllerConfig(ctrl_publish_type="Parallel")
controller = Controller(component_name="controller", config=config)
controller.inputs(
    vision_detections=detections_topic,
    depth_camera_info=depth_cam_info_topic,
)
# NOTE(review): the event action below switches `algorithm` to
# "VisionRGBDFollower"; starting from that same value makes the switch a
# no-op. Confirm whether the initial algorithm should be the idle/patrol
# mode described in the tutorial text.
controller.algorithm = "VisionRGBDFollower"
controller.direct_sensor = False

driver = DriveManager(component_name="driver")
mapper = LocalMapper(component_name="local_mapper")

# --- Event: Person Detected ---
# on_change=True: fire once when "person" first appears in the labels,
# not continuously while a person remains in frame.
event_person_detected = Event(
    event_condition=detections_topic.msg.labels.contains("person"),
    on_change=True,
)

# --- Actions: Switch Algorithm + Start Following ---
switch_algorithm_action = update_parameter(
    component=controller,
    param_name="algorithm",
    new_value="VisionRGBDFollower",
)

action_request_msg = TrackVisionTarget.Goal()
action_request_msg.label = "person"
action_start_person_following = send_component_action_server_goal(
    component=controller,
    request_msg=action_request_msg,
)

# Actions run in list order: switch the algorithm first, then send the goal.
events_action = {
    event_person_detected: [switch_algorithm_action, action_start_person_following],
}

# --- Launch ---
launcher = Launcher()

# Vision runs in the agents package, in its own process.
launcher.add_pkg(
    components=[vision],
    ros_log_level="warn",
    package_name="automatika_embodied_agents",
    executable_entry_point="executable",
    multiprocessing=True,
)

# Navigation stack plus the event/action wiring.
launcher.kompass(
    components=[controller, mapper, driver],
    events_actions=events_action,
)

launcher.robot = my_robot
launcher.bringup()