Working calibration

This commit is contained in:
cristhian aguilera
2026-01-30 16:40:06 -03:00
parent 610c43e16d
commit 61bc384826
17 changed files with 1717 additions and 6 deletions

52
AGENTS.md Normal file
View File

@@ -0,0 +1,52 @@
AGENTS.md
Purpose
Provide project guidelines for agents working in this repository.
Scope
This repository uses Dora, Python, C++, and Rust. Prefer clear, easy-to-understand code and incremental implementation with verification.
General Guidelines
- Keep external dependencies to a minimum. Prefer standard libraries and existing project dependencies.
- Make changes incrementally. Implement small steps, then test or verify before continuing.
- Document assumptions and hardware considerations when relevant.
- Favor readability: simple control flow, meaningful names, and small functions.
Robot Safety
- This is a real robot project. Avoid changes that could cause unsafe motion or unsafe behavior.
- Add or preserve safeguards (limits, checks, bounds) for sensors and actuators.
- If a change affects motion control or safety, call it out explicitly and propose a validation plan.
Language-Specific Rules
Python
- Follow PEP 8 style; use type hints where helpful.
- Prefer dataclasses for structured data.
- Avoid unnecessary abstractions.
C++
- Follow existing project style; default to modern C++ (C++17 or later) unless constrained.
- Prefer RAII and value semantics.
- Avoid raw ownership; use smart pointers where needed.
Rust
- Use idiomatic Rust; prefer ownership/borrowing clarity over complex lifetimes.
- Favor explicit error handling with Result and ?.
- Keep modules small and focused.
Dora
- Keep dataflow definitions simple and explicit.
- Prefer small nodes with clear inputs/outputs.
- Validate node I/O types and message schemas.
Testing and Verification
- Add or update tests when feasible.
- For robot-facing changes, provide a staged validation plan:
1) Unit or simulation check (if available).
2) Dry-run or limited-scope run.
3) Full system run with safety monitoring.
Review Checklist
- Dependencies minimized
- Code readable and consistent with language rules
- Incremental changes validated
- Safety considerations addressed

View File

@@ -0,0 +1,31 @@
---
# Calibration poses for the UFactory Lite6 + ZED eye-in-hand calibration.
# position: x/y/z of the TCP in the robot base frame — assumed metres
#   (POSE_UNITS defaults to "m" in the calibration node); confirm before editing.
# orientation_rpy_deg: roll/pitch/yaw of the TCP in degrees.
calibration_poses:
  - position: {x: 0.25, y: 0.03, z: 0.39}
    orientation_rpy_deg: {roll: -180.00, pitch: 1.00, yaw: 2.00}
  - position: {x: 0.24, y: 0.06, z: 0.45}
    orientation_rpy_deg: {roll: -180.00, pitch: 2.00, yaw: 1.00}
  - position: {x: 0.24, y: 0.05, z: 0.45}
    orientation_rpy_deg: {roll: +180.00, pitch: 1.00, yaw: 47.0}
  - position: {x: 0.24, y: -0.08, z: 0.45}
    orientation_rpy_deg: {roll: -180.00, pitch: 1.00, yaw: -47.0}
  - position: {x: 0.24, y: 0.07, z: 0.50}
    orientation_rpy_deg: {roll: -180.00, pitch: 4.00, yaw: 4.00}
  - position: {x: 0.19, y: -0.20, z: 0.38}
    orientation_rpy_deg: {roll: -151.00, pitch: 0.00, yaw: 18.0}
  - position: {x: 0.25, y: 0.13, z: 0.39}
    orientation_rpy_deg: {roll: 156.00, pitch: 5.80, yaw: 25.1}
  - position: {x: 0.26, y: 0.14, z: 0.31}
    orientation_rpy_deg: {roll: 161.00, pitch: 10.00, yaw: 20.00}
  - position: {x: 0.13, y: -0.03, z: 0.36}
    orientation_rpy_deg: {roll: 176.00, pitch: -21.00, yaw: 5.00}
  - position: {x: 0.33, y: 0.01, z: 0.36}
    orientation_rpy_deg: {roll: 180.00, pitch: 16.00, yaw: -2.00}

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,63 @@
# Dora dataflow: eye-in-hand calibration of a ZED camera mounted on a
# UFactory Lite6. Camera frames + robot TCP poses feed the calibration node,
# which commands the robot through its pose list and solves hand-eye.
nodes:
  # ZED stereo camera driver (C++). Publishes BGR frames and camera intrinsics.
  - id: zed_camera_cpp
    build: bash -lc "cmake -S dora_zed_cpp -B dora_zed_cpp/build && cmake --build dora_zed_cpp/build"
    path: dora_zed_cpp/build/dora_zed_cpp
    env:
      ZED_RESOLUTION: "720"
      ZED_FPS: "15"
      ZED_DEPTH_MODE: "NEURAL"
      ZED_DEPTH_MIN_MM: "10"
      ZED_DEPTH_MAX_MM: "600"
      ZED_DEPTH_FILL: "false"
      ZED_FLIP: "ON"
      ZED_WARMUP_FRAMES: "30"  # frames discarded at startup (presumably auto-exposure settle — confirm)
    inputs:
      tick: dora/timer/millis/100  # ~10 Hz capture trigger
    outputs:
      - image_bgr
      - camera_info
  # UFactory Lite6 driver: executes move commands from the calibration node
  # and publishes the TCP pose ([x, y, z, roll, pitch, yaw] in mm/deg) + status.
  - id: ulite6
    build: uv pip install -e dora_ulite6
    path: dora_ulite6/dora_ulite6/main.py
    inputs:
      tick: dora/timer/millis/10
      command: calibration/robot_cmd  # move requests issued by the calibration node
    outputs:
      - tcp_pose
      - status
    env:
      ROBOT_IP: "192.168.1.192"
      DEFAULT_SPEED: "30"  # presumably mm/s — see dora_ulite6 docs
      DEFAULT_UNITS: "mm"
      API_HOST: "0.0.0.0"
      API_PORT: "9000"
      VACUUM_ENABLED: "false"
  # Calibration orchestrator: iterates the pose list, captures ChArUco board
  # detections, solves hand-eye, and saves the result to OUTPUT_FILE.
  - id: calibration
    build: |
      uv venv -p 3.12 --seed --allow-existing
      uv pip install -e dora_calibration
    path: dora_calibration/dora_calibration/main.py
    env:
      VIRTUAL_ENV: ./.venv
      IMAGE_INPUT: "image_bgr"
      CAMERA_INFO_INPUT: "camera_info"
      POSE_INPUT: "tcp_pose"
      STATUS_INPUT: "status"
      COMMAND_OUTPUT: "robot_cmd"
      POSES_FILE: "config/calibration_poses_ulite6.yml"
      OUTPUT_FILE: "calibration_ulite6_zed.npz"
      METHOD: "TSAI"  # hand-eye solver: TSAI, PARK, HORAUD, ANDREFF, DANIILIDIS
      SQUARES_X: "4"  # ChArUco board geometry
      SQUARES_Y: "6"
      SQUARE_LENGTH: "0.04"  # metres
      MARKER_LENGTH: "0.03"  # metres
      WAIT_AFTER_MOVE: "2.0"  # seconds to let the robot settle before capture
      WAIT_BETWEEN_POSES: "2.0"
    inputs:
      image_bgr: zed_camera_cpp/image_bgr
      camera_info: zed_camera_cpp/camera_info
      tcp_pose: ulite6/tcp_pose
      status: ulite6/status
      tick: dora/timer/millis/100
    outputs:
      - robot_cmd

View File

@@ -13,5 +13,5 @@ nodes:
DEFAULT_SPEED: "30"
DEFAULT_UNITS: "mm"
API_HOST: "0.0.0.0"
API_PORT: "8080"
API_PORT: "9000"
VACUUM_ENABLED: "true"

View File

@@ -0,0 +1,50 @@
# Dora Calibration Node
Minimal eye-in-hand calibration node for a camera mounted on the robot gripper.
## Inputs
- `image` (configurable via `IMAGE_INPUT`): image stream with metadata containing `encoding` and `shape` or `width/height`.
- `camera_info` (optional): `[fx, fy, cx, cy]` float array with distortion in metadata.
- `tcp_pose`: `[x, y, z, roll, pitch, yaw]` from `dora_ulite6` in mm/deg.
## Controls (OpenCV Window)
- `q` quit
- `r` restart calibration (clear observations and start over)
## Environment Variables
| Variable | Default | Description |
| --- | --- | --- |
| `IMAGE_INPUT` | `image` | Image input name |
| `CAMERA_INFO_INPUT` | `camera_info` | Camera info input name |
| `POSE_INPUT` | `tcp_pose` | Robot pose input name |
| `STATUS_INPUT` | `status` | Robot status input name (used for command completion) |
| `COMMAND_OUTPUT` | `robot_cmd` | Output name for robot command messages |
| `POSES_FILE` | `config/calibration_poses.yml` | YAML file path for calibration poses |
| `POSE_UNITS` | `m` | Units for poses file (`m` or `mm`) |
| `MOVE_SPEED` | _(unset)_ | Move speed override (mm/s) |
| `OUTPUT_FILE` | `calibration.npz` | Output calibration file (observations saved as `{name}_observations.npz`) |
| `METHOD` | `TSAI` | Hand-eye method (TSAI, PARK, HORAUD, ANDREFF, DANIILIDIS) |
| `SQUARES_X` | `4` | ChArUco board squares X |
| `SQUARES_Y` | `6` | ChArUco board squares Y |
| `SQUARE_LENGTH` | `0.04` | Square length (m) |
| `MARKER_LENGTH` | `0.03` | Marker length (m) |
| `AUTO_RUN` | `true` | Automatically iterate poses and capture |
| `WAIT_AFTER_MOVE` | `2.0` | Seconds to wait after a move before capture |
| `MOVE_HOME_ON_START` | `true` | Move to calibration home when starting |
| `HOME_X` | `200` | Calibration home X (mm) |
| `HOME_Y` | `0` | Calibration home Y (mm) |
| `HOME_Z` | `300` | Calibration home Z (mm) |
| `HOME_ROLL` | `180` | Calibration home roll (deg) |
| `HOME_PITCH` | `0` | Calibration home pitch (deg) |
| `HOME_YAW` | `0` | Calibration home yaw (deg) |
| `MIN_CORNERS` | `6` | Minimum ChArUco corners required for pose |
| `LOG_INTERVAL` | `2.0` | Seconds between console detection logs |
| `AUTO_SOLVE` | `true` | Automatically solve and save after last pose |
## Notes
- Calibration runs automatically: moves through poses, captures, and saves results.
- For safe operation, keep a clear workspace around the robot.

View File

@@ -0,0 +1 @@
"""Dora calibration node package."""

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,33 @@
[project]
name = "dora-calibration"
version = "0.1.0"
# Fix: `license = { file = ... }` must point at a license file path;
# "MIT" is an SPDX license expression, so declare it as inline text.
license = { text = "MIT" }
authors = [{ name = "Dora" }]
description = "Dora node for eye-in-hand calibration with minimal OpenCV UI"
requires-python = ">=3.8"
dependencies = [
    "dora-rs >= 0.3.9",
    "numpy < 2.0.0",
    "opencv-contrib-python >= 4.1.1",
    "pyyaml >= 6.0",
]

[dependency-groups]
dev = ["pytest >=8.1.1", "ruff >=0.9.1"]

[project.scripts]
dora-calibration = "dora_calibration.main:main"

# Ruff rule groups: pydocstyle, pyupgrade, perflint, return, raise,
# numpy-specific, naming, and import sorting.
[tool.ruff.lint]
extend-select = [
    "D",
    "UP",
    "PERF",
    "RET",
    "RSE",
    "NPY",
    "N",
    "I",
]

View File

@@ -0,0 +1,74 @@
#!/usr/bin/env python3
"""Analyze saved observations for issues."""
import argparse
import numpy as np
from scipy.spatial.transform import Rotation
def main():
    """Analyze a saved hand-eye observations file and report potential issues.

    Loads the gripper->base and target->camera pose lists from an ``.npz``
    file, validates every rotation matrix (orthogonality and unit
    determinant), prints translations/orientations, and summarizes how
    diverse the captured poses are (important for a well-conditioned
    hand-eye solve).
    """
    parser = argparse.ArgumentParser(description="Analyze hand-eye observations for issues.")
    parser.add_argument("observations", nargs="?", default="calibration_observations.npz",
                        help="Path to observations .npz file")
    args = parser.parse_args()
    # Load dora observations
    dora_obs = np.load(args.observations, allow_pickle=True)
    print("=== OBSERVATIONS ANALYSIS ===")
    print(f"File: {args.observations}")
    print(f"Keys: {list(dora_obs.keys())}")
    R_gripper2base = dora_obs['R_gripper2base']
    t_gripper2base = dora_obs['t_gripper2base']
    R_target2cam = dora_obs['R_target2cam']
    t_target2cam = dora_obs['t_target2cam']
    n = len(R_gripper2base)
    print(f"Count: {n}")
    print("\n=== GRIPPER POSES (base_T_gripper) ===")
    for i in range(n):
        R = R_gripper2base[i]
        t = t_gripper2base[i].flatten()
        # Check rotation matrix validity: R must be orthogonal with det(R) == 1.
        det = np.linalg.det(R)
        orth = np.allclose(R @ R.T, np.eye(3), atol=1e-6)
        rpy = Rotation.from_matrix(R).as_euler('xyz', degrees=True)
        print(f"\nObs {i+1}:")
        print(f" det(R)={det:.6f}, orthogonal={orth}")
        print(f" t(mm)=[{t[0]*1000:.1f}, {t[1]*1000:.1f}, {t[2]*1000:.1f}]")
        print(f" RPY(deg)=[{rpy[0]:.1f}, {rpy[1]:.1f}, {rpy[2]:.1f}]")
        if not orth:
            print(" WARNING: Rotation matrix not orthogonal!")
        if abs(det - 1.0) > 1e-6:
            print(f" WARNING: Rotation matrix determinant is {det}, expected 1.0!")
    print("\n=== TARGET POSES (cam_T_target) ===")
    for i in range(n):
        R = R_target2cam[i]
        t = t_target2cam[i].flatten()
        det = np.linalg.det(R)
        orth = np.allclose(R @ R.T, np.eye(3), atol=1e-6)
        print(f"\nObs {i+1}:")
        print(f" det(R)={det:.6f}, orthogonal={orth}")
        print(f" t(m)=[{t[0]:.4f}, {t[1]:.4f}, {t[2]:.4f}]")
        print(f" distance from camera: {np.linalg.norm(t)*1000:.1f}mm")
        # Fix: previously only the gripper loop emitted validity warnings;
        # apply the same checks to the target rotations for consistency.
        if not orth:
            print(" WARNING: Rotation matrix not orthogonal!")
        if abs(det - 1.0) > 1e-6:
            print(f" WARNING: Rotation matrix determinant is {det}, expected 1.0!")
    # Check for pose diversity (important for calibration)
    print("\n=== POSE DIVERSITY CHECK ===")
    positions = np.array([t_gripper2base[i].flatten() for i in range(n)])
    pos_range = positions.max(axis=0) - positions.min(axis=0)
    print(f"Position range (mm): X={pos_range[0]*1000:.1f}, Y={pos_range[1]*1000:.1f}, Z={pos_range[2]*1000:.1f}")
    rotations = np.array([Rotation.from_matrix(R_gripper2base[i]).as_euler('xyz', degrees=True) for i in range(n)])
    rot_range = rotations.max(axis=0) - rotations.min(axis=0)
    # NOTE(review): Euler-angle ranges can look inflated near the +/-180 deg
    # wrap-around — interpret large roll/yaw ranges with care.
    print(f"Rotation range (deg): Roll={rot_range[0]:.1f}, Pitch={rot_range[1]:.1f}, Yaw={rot_range[2]:.1f}")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,114 @@
#!/usr/bin/env python3
"""Compare dora calibration with reference calibration."""
import argparse
import numpy as np
from scipy.spatial.transform import Rotation
def load_calibration(path):
    """Load a calibration result from an ``.npz`` file into a plain dict.

    The hand-eye transform is always returned under ``"T_cam2gripper"``;
    camera intrinsics (``"camera_matrix"``) and ``"distortion"`` are copied
    over only when the file contains them.
    """
    archive = np.load(path, allow_pickle=True)
    loaded = {"T_cam2gripper": archive["T_cam2gripper"]}
    for optional_key in ("camera_matrix", "distortion"):
        if optional_key in archive:
            loaded[optional_key] = archive[optional_key]
    return loaded
def analyze_transform(T, name):
    """Print a human-readable summary of a 4x4 homogeneous transform.

    Reports the translation in millimetres, the rotation as XYZ Euler
    angles in degrees, and finally the full matrix.
    """
    print(f"\n=== {name} ===")
    tx, ty, tz = T[0, 3] * 1000, T[1, 3] * 1000, T[2, 3] * 1000
    print(f"Translation (mm): [{tx:.2f}, {ty:.2f}, {tz:.2f}]")
    roll, pitch, yaw = Rotation.from_matrix(T[:3, :3]).as_euler('xyz', degrees=True)
    print(f"RPY (deg): [{roll:.2f}, {pitch:.2f}, {yaw:.2f}]")
    print(f"Full matrix:\n{T}")
def analyze_camera(cam_matrix, name):
    """Print camera intrinsics and run basic plausibility checks.

    Warns on implausibly small focal lengths or principal point and on a
    large fx/fy asymmetry; prints a confirmation when everything looks sane.
    """
    print(f"\n=== {name} Camera Intrinsics ===")
    fx = cam_matrix[0, 0]
    fy = cam_matrix[1, 1]
    cx = cam_matrix[0, 2]
    cy = cam_matrix[1, 2]
    print(f"fx={fx:.2f}, fy={fy:.2f}, cx={cx:.2f}, cy={cy:.2f}")
    # Sanity checks: (condition, message) pairs evaluated in order.
    checks = [
        (fx < 100 or fy < 100, f"Focal lengths too small (fx={fx:.1f}, fy={fy:.1f})"),
        (cx < 100 or cy < 100, f"Principal point too small (cx={cx:.1f}, cy={cy:.1f})"),
        (abs(fx - fy) > 50, f"Large focal length asymmetry: |fx-fy|={abs(fx - fy):.1f}"),
    ]
    issues = [message for failed, message in checks if failed]
    if issues:
        print("WARNINGS:")
        for issue in issues:
            print(f" - {issue}")
    else:
        print("Intrinsics appear valid")
def main():
    """Compare a dora-produced hand-eye calibration against a reference one.

    Loads both calibrations, prints each transform (and optionally camera
    intrinsics), then reports the translation difference (mm) and rotation
    difference (deg) against fixed acceptance thresholds (5 mm / 2 deg).
    """
    parser = argparse.ArgumentParser(description="Compare dora vs reference calibration.")
    parser.add_argument("--dora", default="calibration.npz",
                        help="Path to dora calibration result")
    parser.add_argument("--reference", default="reference_calibration.npz",
                        help="Path to reference calibration result")
    parser.add_argument("--check-intrinsics", action="store_true",
                        help="Also compare camera intrinsics")
    args = parser.parse_args()
    # The dora result is mandatory: bail out if it cannot be loaded.
    try:
        dora_data = load_calibration(args.dora)
        dora = dora_data["T_cam2gripper"]
    except FileNotFoundError:
        print(f"Dora calibration not found: {args.dora}")
        return
    # The reference is optional: fall back to analyzing the dora result alone.
    try:
        ref_data = load_calibration(args.reference)
        ref = ref_data["T_cam2gripper"]
    except FileNotFoundError:
        print(f"Reference calibration not found: {args.reference}")
        print("Analyzing dora calibration only:")
        analyze_transform(dora, "Dora Calibration")
        if args.check_intrinsics and "camera_matrix" in dora_data:
            analyze_camera(dora_data["camera_matrix"], "Dora")
        return
    analyze_transform(dora, "Dora Calibration")
    analyze_transform(ref, "Reference Calibration")
    # Check camera intrinsics if requested
    if args.check_intrinsics:
        if "camera_matrix" in dora_data:
            analyze_camera(dora_data["camera_matrix"], "Dora")
        if "camera_matrix" in ref_data:
            analyze_camera(ref_data["camera_matrix"], "Reference")
    # Compute differences
    print("\n=== DIFFERENCES ===")
    # Euclidean distance between the two translations, converted to mm.
    trans_diff = np.linalg.norm(dora[:3,3] - ref[:3,3]) * 1000
    print(f"Translation difference: {trans_diff:.2f} mm")
    R_dora = dora[:3,:3]
    R_ref = ref[:3,:3]
    # Relative rotation; its angle (recovered from the trace, clipped for
    # numerical safety) is the geodesic rotation difference.
    R_diff = R_dora @ R_ref.T
    angle_diff = np.rad2deg(np.arccos(np.clip((np.trace(R_diff) - 1) / 2, -1, 1)))
    print(f"Rotation difference: {angle_diff:.2f} deg")
    # Thresholds for acceptable calibration
    if trans_diff < 5.0 and angle_diff < 2.0:
        print("\n[OK] Calibrations are within acceptable tolerance")
    else:
        print("\n[FAIL] Calibrations differ significantly!")
        if trans_diff >= 5.0:
            print(f" Translation diff {trans_diff:.2f}mm exceeds 5mm threshold")
        if angle_diff >= 2.0:
            print(f" Rotation diff {angle_diff:.2f} deg exceeds 2 deg threshold")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,65 @@
#!/usr/bin/env python3
"""Recompute hand-eye calibration from an observations .npz file."""
import argparse
from typing import List
import cv2
import numpy as np
def _method_from_name(name: str) -> int:
    """Map a hand-eye method name (case-insensitive) to its OpenCV constant.

    Unknown names fall back to the Tsai method.
    """
    lookup = {
        "TSAI": cv2.CALIB_HAND_EYE_TSAI,
        "PARK": cv2.CALIB_HAND_EYE_PARK,
        "HORAUD": cv2.CALIB_HAND_EYE_HORAUD,
        "ANDREFF": cv2.CALIB_HAND_EYE_ANDREFF,
        "DANIILIDIS": cv2.CALIB_HAND_EYE_DANIILIDIS,
    }
    return lookup.get(name.upper(), cv2.CALIB_HAND_EYE_TSAI)
def main() -> None:
    """Recompute a hand-eye calibration from stored observations.

    Reads the gripper->base and target->camera pose lists from the given
    ``.npz`` file, solves for the camera-to-gripper transform with the
    requested OpenCV method, and prints the transform plus a residual
    metric: the spread of the reconstructed target position in base frame.
    """
    parser = argparse.ArgumentParser(description="Recompute hand-eye from observations npz.")
    parser.add_argument("observations", help="*_observations.npz path")
    parser.add_argument("--method", default="TSAI")
    args = parser.parse_args()
    data = np.load(args.observations, allow_pickle=True)
    R_gripper2base: List[np.ndarray] = list(data["R_gripper2base"])
    t_gripper2base: List[np.ndarray] = list(data["t_gripper2base"])
    R_target2cam: List[np.ndarray] = list(data["R_target2cam"])
    t_target2cam: List[np.ndarray] = list(data["t_target2cam"])
    R_cam2gripper, t_cam2gripper = cv2.calibrateHandEye(
        R_gripper2base, t_gripper2base, R_target2cam, t_target2cam,
        method=_method_from_name(args.method),
    )
    T_cam2gripper = np.eye(4)
    T_cam2gripper[:3, :3] = R_cam2gripper
    T_cam2gripper[:3, 3] = t_cam2gripper.reshape(3)

    def as_homogeneous(rot, trans):
        # Pack a 3x3 rotation and a translation vector into a 4x4 transform.
        T = np.eye(4)
        T[:3, :3] = rot
        T[:3, 3] = np.array(trans).reshape(3)
        return T

    # If the calibration is consistent, every observation should place the
    # (static) target at the same point in base frame.
    base_targets = []
    for R_g, t_g, R_t, t_t in zip(R_gripper2base, t_gripper2base, R_target2cam, t_target2cam):
        base_T_target = as_homogeneous(R_g, t_g) @ T_cam2gripper @ as_homogeneous(R_t, t_t)
        base_targets.append(base_T_target[:3, 3])
    base_targets = np.array(base_targets)
    errors = np.linalg.norm(base_targets - base_targets.mean(axis=0)[None, :], axis=1)
    print("T_cam2gripper:")
    print(T_cam2gripper)
    print(f"Mean translation error: {errors.mean()*1000:.2f} mm")
    print(f"Max translation error: {errors.max()*1000:.2f} mm")


if __name__ == "__main__":
    main()

View File

@@ -11,6 +11,7 @@ Dora node for controlling a UFactory Lite6 robot via REST API, web UI, and publi
inputs:
tick: dora/timer/millis/10
outputs: [status, joint_pose, tcp_pose]
inputs: [command]
env:
ROBOT_IP: "192.168.1.192"
DEFAULT_SPEED: "30"
@@ -90,3 +91,12 @@ curl -X POST http://localhost:8080/api/move_to_pose \
- `status` - JSON: `{ok, action, message, timestamp_ns}`
- `joint_pose` - 6 joint angles in degrees
- `tcp_pose` - `[x, y, z, roll, pitch, yaw]` in mm/deg
## Dora Inputs
- `command` - JSON string: `{id, action, payload}`
- `id`: unique command id (required)
- `action`: `home`, `move_to`, or `move_to_pose`
- `payload`: fields for move (`x`, `y`, `z`, `roll`, `pitch`, `yaw`, `speed`)
`status` will include `command_id` for command responses.

View File

@@ -732,16 +732,55 @@ def _send_tcp_pose(node: Node, tcp_pose: Dict[str, float]) -> None:
node.send_output("tcp_pose", pa.array(vec), metadata=metadata)
def _send_error_status(node: Node, action: str, message: str) -> None:
def _send_error_status(
    node: Node, action: str, message: str, status: Optional[Dict[str, Any]] = None
) -> None:
    """Publish a failure status message on the "status" output.

    Args:
        node: Dora node used to emit the output.
        action: Name of the operation that failed.
        message: Human-readable error description.
        status: Optional extra fields (e.g. a robot status snapshot) merged
            into the payload; keys here override the defaults below.
    """
    payload = {
        "ok": False,
        "action": action,
        "message": message,
        "timestamp_ns": time.time_ns(),
    }
    if status:
        payload.update(status)
    node.send_output("status", pa.array([json.dumps(payload)]))
def _send_command_status(
    node: Node,
    command_id: str,
    action: str,
    ok: bool,
    message: str,
    code: Optional[int] = None,
    status: Optional[Dict[str, Any]] = None,
) -> None:
    """Publish the outcome of a dataflow command on the "status" output.

    Args:
        node: Dora node used to emit the output.
        command_id: Id of the command being acknowledged; echoed back as
            "command_id" so the sender can correlate responses.
        action: Action name that was executed.
        ok: Whether the command succeeded.
        message: Human-readable result description.
        code: Optional robot API return code; included only when provided.
        status: Optional extra fields (e.g. a robot status snapshot) merged
            into the payload; keys here override the defaults below.
    """
    payload = {
        "ok": ok,
        "action": action,
        "message": message,
        "timestamp_ns": time.time_ns(),
        "command_id": command_id,
    }
    if code is not None:
        payload["code"] = code
    if status:
        payload.update(status)
    node.send_output("status", pa.array([json.dumps(payload)]))
def _status_snapshot(helper: ULite6Helper) -> Dict[str, Any]:
    """Extract the robot-state fields that status messages report.

    Missing fields come back as None (dict.get semantics).
    """
    raw = helper.get_status()
    fields = ("state", "mode", "error_code", "warn_code", "has_error", "has_warn")
    return {field: raw.get(field) for field in fields}
def main() -> None:
node = Node()
@@ -773,7 +812,107 @@ def main() -> None:
_send_joint_pose(node, joints)
_send_tcp_pose(node, tcp_pose)
except Exception as exc:
_send_error_status(node, "publish_state", str(exc))
_send_error_status(
node, "publish_state", str(exc), status=_status_snapshot(helper)
)
elif event["id"] == "command":
try:
value = event["value"]
raw = value[0].as_py() if len(value) else None
if not raw:
_send_command_status(
node,
command_id="",
action="command",
ok=False,
message="Empty command payload",
status=_status_snapshot(helper),
)
continue
payload = json.loads(raw)
except Exception as exc:
_send_command_status(
node,
command_id="",
action="command",
ok=False,
message=f"Invalid command payload: {exc}",
status=_status_snapshot(helper),
)
continue
command_id = str(payload.get("id", ""))
action = str(payload.get("action", ""))
data = payload.get("payload", {}) if isinstance(payload.get("payload", {}), dict) else {}
if not command_id:
_send_command_status(
node,
command_id="",
action=action or "command",
ok=False,
message="Missing command id",
status=_status_snapshot(helper),
)
continue
try:
if action == "home":
code = helper.go_home()
_send_command_status(
node,
command_id=command_id,
action=action,
ok=code == 0,
message="Home command executed",
code=code,
status=_status_snapshot(helper),
)
elif action in ("move_to", "move_to_pose"):
speed = float(data.get("speed", default_speed))
x = float(data.get("x", 0.0))
y = float(data.get("y", 0.0))
z = float(data.get("z", 0.0))
roll = float(data.get("roll", 180.0))
pitch = float(data.get("pitch", 0.0))
yaw = float(data.get("yaw", 0.0))
code = helper.move_to_pose(
x,
y,
z,
roll,
pitch,
yaw,
speed,
default_units,
)
_send_command_status(
node,
command_id=command_id,
action=action,
ok=code == 0,
message="Move command executed",
code=code,
status=_status_snapshot(helper),
)
else:
_send_command_status(
node,
command_id=command_id,
action=action or "command",
ok=False,
message=f"Unknown action: {action}",
status=_status_snapshot(helper),
)
except Exception as exc:
_send_command_status(
node,
command_id=command_id,
action=action or "command",
ok=False,
message=str(exc),
status=_status_snapshot(helper),
)
if __name__ == "__main__":

View File

@@ -1,11 +1,11 @@
set(DORA_ROOT_DIR "/home/cristhian/workspace/garbage/dora" CACHE FILEPATH "Path to the root of dora")
set(DORA_ROOT_DIR "" CACHE FILEPATH "Path to the root of dora")
set(dora_c_include_dir "${CMAKE_CURRENT_BINARY_DIR}/include/c")
set(dora_cxx_include_dir "${CMAKE_CURRENT_BINARY_DIR}/include/cxx")
set(node_bridge "${CMAKE_CURRENT_BINARY_DIR}/node_bridge.cc")
if(DORA_ROOT_DIR)
if(DORA_ROOT_DIR AND EXISTS "${DORA_ROOT_DIR}/Cargo.toml")
include(ExternalProject)
ExternalProject_Add(Dora
SOURCE_DIR ${DORA_ROOT_DIR}
@@ -40,10 +40,28 @@ if(DORA_ROOT_DIR)
set(dora_link_dirs ${DORA_ROOT_DIR}/target/debug)
else()
include(ExternalProject)
set(DORA_GIT_TAG "main")
find_program(DORA_CLI dora)
if(DORA_CLI)
execute_process(
COMMAND ${DORA_CLI} --version
RESULT_VARIABLE _dora_version_result
OUTPUT_VARIABLE _dora_version_out
ERROR_VARIABLE _dora_version_err
OUTPUT_STRIP_TRAILING_WHITESPACE
)
if(_dora_version_result EQUAL 0)
string(REGEX MATCH "([0-9]+\\.[0-9]+\\.[0-9]+)" _dora_version_match "${_dora_version_out}")
if(_dora_version_match)
set(DORA_GIT_TAG "v${CMAKE_MATCH_1}")
endif()
endif()
endif()
ExternalProject_Add(Dora
PREFIX ${CMAKE_CURRENT_BINARY_DIR}/dora
GIT_REPOSITORY https://github.com/dora-rs/dora.git
GIT_TAG main
GIT_TAG ${DORA_GIT_TAG}
BUILD_IN_SOURCE True
CONFIGURE_COMMAND ""
BUILD_COMMAND

View File

@@ -189,6 +189,7 @@ int main() {
auto [fx, fy, cx, cy, distortion] = reload_calibration();
int64_t frame_id = 0;
while (true) {
auto event = dora_node.events->next();
auto type = event_type(event);
@@ -237,6 +238,12 @@ int main() {
image_metadata->set_float("cx", cx);
image_metadata->set_float("cy", cy);
image_metadata->set_list_float("distortion", make_distortion(distortion));
image_metadata->set_int("frame_id", frame_id++);
image_metadata->set_int("timestamp_ns", static_cast<int64_t>(
std::chrono::duration_cast<std::chrono::nanoseconds>(
std::chrono::steady_clock::now().time_since_epoch()
).count()
));
auto image_result = send_output_with_metadata(
dora_node.send_output, "image_bgr", image_slice, std::move(image_metadata));