Skip to content

Commit

Permalink
pr updates
Browse files Browse the repository at this point in the history
  • Loading branch information
Serafadam committed Jan 13, 2025
1 parent 9e0a58b commit 641df1c
Show file tree
Hide file tree
Showing 17 changed files with 258 additions and 91 deletions.
1 change: 1 addition & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -430,6 +430,7 @@ set(TARGET_CORE_SOURCES
src/pipeline/datatype/BenchmarkReport.cpp
src/pipeline/datatype/PointCloudConfig.cpp
src/pipeline/datatype/PointCloudData.cpp
src/pipeline/datatype/RGBDData.cpp
src/pipeline/datatype/MessageGroup.cpp
src/pipeline/datatype/TransformData.cpp
src/utility/H26xParsers.cpp
Expand Down
39 changes: 39 additions & 0 deletions bindings/python/src/pipeline/datatype/RGBDDataBindings.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@

#include "DatatypeBindings.hpp"
#include "pipeline/CommonBindings.hpp"
#include <unordered_map>
#include <memory>

// depthai
#include "depthai/pipeline/datatype/RGBDData.hpp"

//pybind
#include <pybind11/chrono.h>
#include <pybind11/numpy.h>

void bind_transformdata(pybind11::module& m, void* pCallstack){

using namespace dai;

py::class_<RGBDData, Py<RGBDData>, Buffer, std::shared_ptr<RGBDData>> rgbdData(m, "RGBDData", DOC(dai, RGBDData));

///////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////
// Call the rest of the type defines, then perform the actual bindings
Callstack* callstack = (Callstack*) pCallstack;
auto cb = callstack->top();
callstack->pop();
cb(m, pCallstack);
// Actual bindings
///////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////

// Metadata / raw
rgbdData
.def(py::init<>())
.def("__repr__", &RGBDData::str)
.def_readwrite("rgbFrame", &RGBDData::rgbFrame, DOC(dai, RGBDData, rgbFrame))
.def_readwrite("depthFrame", &RGBDData::depthFrame, DOC(dai, RGBDData, depthFrame));
}
2 changes: 1 addition & 1 deletion bindings/python/src/pipeline/node/RGBDBindings.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ void bind_rgbd(pybind11::module& m, void* pCallstack) {
py::arg("autocreate"),
py::arg("size"),
DOC(dai, node, RGBD, build, 2))
.def("setOutputMeters", &RGBD::setOutputMeters, py::arg("outputMeters"), DOC(dai, node, RGBD, setOutputMeters))
.def("setDepthUnits", &RGBD::setDepthUnit, py::arg("units"), DOC(dai, node, RGBD, setDepthUnit))
.def("useCPU", &RGBD::useCPU, DOC(dai, node, RGBD, useCPU))
.def("useCPUMT", &RGBD::useCPUMT, py::arg("numThreads") = 2, DOC(dai, node, RGBD, useCPUMT))
.def("useGPU", &RGBD::useGPU, py::arg("device") = 0, DOC(dai, node, RGBD, useGPU))
Expand Down
4 changes: 2 additions & 2 deletions examples/cpp/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -298,10 +298,10 @@ dai_set_example_test_labels(visualizer_yolo ondevice rvc2_all rvc4)
include(FetchContent)
FetchContent_Declare(rerun_sdk URL https://github.com/rerun-io/rerun/releases/download/0.16.1/rerun_cpp_sdk.zip)
FetchContent_MakeAvailable(rerun_sdk)
dai_add_example(rgbd HostNodes/rgbd.cpp OFF OFF)
dai_add_example(rgbd RGBD/rgbd.cpp ON OFF)
target_link_libraries(rgbd PRIVATE rerun_sdk)

dai_add_example(visualizer_rgbd Visualizer/visualizer_rgbd.cpp OFF OFF)
dai_add_example(visualizer_rgbd RGBD/visualizer_rgbd.cpp ON OFF)

if(DEPTHAI_RTABMAP_SUPPORT)
include(FetchContent)
Expand Down
36 changes: 20 additions & 16 deletions examples/cpp/HostNodes/rgbd.cpp → examples/cpp/RGBD/rgbd.cpp
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@

#include "depthai/depthai.hpp"
#include "rerun.hpp"
#include "rerun/archetypes/depth_image.hpp"

// Describe an OpenCV image's dimensions (rows, cols, channels) as a rerun
// tensor shape, for logging frames via rerun::Image.
rerun::Collection<rerun::TensorDimension> tensorShape(const cv::Mat& img) {
    return {size_t(img.rows), size_t(img.cols), size_t(img.channels())};
}  // note: stray ';' after the function body removed (-Wextra-semi)
class RerunNode : public dai::NodeCRTP<dai::node::ThreadedHostNode, RerunNode> {
public:
constexpr static const char* NAME = "RerunNode";
Expand All @@ -10,13 +13,15 @@ class RerunNode : public dai::NodeCRTP<dai::node::ThreadedHostNode, RerunNode> {
void build() {}

Input inputPCL{*this, {.name = "inPCL", .types = {{dai::DatatypeEnum::PointCloudData, true}}}};
Input inputRGBD{*this, {.name = "inRGBD", .types = {{dai::DatatypeEnum::RGBDData, true}}}};

void run() override {
const auto rec = rerun::RecordingStream("rerun");
rec.spawn().exit_on_failure();
rec.log_static("world", rerun::ViewCoordinates::FLU);
while(isRunning()) {
auto pclIn = inputPCL.get<dai::PointCloudData>();
auto rgbdIn = inputRGBD.get<dai::RGBDData>();
if(pclIn != nullptr) {
std::vector<rerun::Position3D> points;
std::vector<rerun::Color> colors;
Expand All @@ -29,6 +34,10 @@ class RerunNode : public dai::NodeCRTP<dai::node::ThreadedHostNode, RerunNode> {
colors.emplace_back(pclData[i].r, pclData[i].g, pclData[i].b);
}
rec.log("world/obstacle_pcl", rerun::Points3D(points).with_colors(colors).with_radii({0.01f}));
auto colorFrame = rgbdIn->rgbFrame.getCvFrame();
cv::cvtColor(colorFrame, colorFrame, cv::COLOR_BGR2RGB);
rec.log("rgb",
rerun::Image(tensorShape(colorFrame), reinterpret_cast<const uint8_t*>(colorFrame.data)));
}
}
}
Expand All @@ -38,22 +47,16 @@ int main() {
// Create pipeline
dai::Pipeline pipeline;
// Define sources and outputs
int fps = 30;
// Define sources and outputs
auto left = pipeline.create<dai::node::MonoCamera>();
auto right = pipeline.create<dai::node::MonoCamera>();
auto left = pipeline.create<dai::node::Camera>();
auto right = pipeline.create<dai::node::Camera>();
auto stereo = pipeline.create<dai::node::StereoDepth>();
auto rgbd = pipeline.create<dai::node::RGBD>()->build();
auto color = pipeline.create<dai::node::Camera>();
auto rerun = pipeline.create<RerunNode>();
color->build();

left->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
left->setCamera("left");
left->setFps(fps);
right->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
right->setCamera("right");
right->setFps(fps);
left->build(dai::CameraBoardSocket::LEFT);
right->build(dai::CameraBoardSocket::RIGHT);
stereo->setSubpixel(true);
stereo->setExtendedDisparity(false);
stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::DEFAULT);
Expand All @@ -62,19 +65,20 @@ int main() {
stereo->enableDistortionCorrection(true);
stereo->initialConfig.setLeftRightCheckThreshold(10);
stereo->initialConfig.postProcessing.thresholdFilter.maxRange = 10000;
rgbd->setOutputMeters(true);
rgbd->setDepthUnit(dai::StereoDepthConfig::AlgorithmControl::DepthUnit::METER);

auto* out = color->requestOutput(std::pair<int, int>(1280, 720), dai::ImgFrame::Type::RGB888i);

auto *out = color->requestOutput(std::pair<int, int>(1280, 720), dai::ImgFrame::Type::RGB888i);

out->link(stereo->inputAlignTo);
left->out.link(stereo->left);
right->out.link(stereo->right);
left->requestOutput(std::pair<int, int>(1280, 720))->link(stereo->left);
right->requestOutput(std::pair<int, int>(1280, 720))->link(stereo->right);

stereo->depth.link(rgbd->inDepth);
out->link(rgbd->inColor);

// Linking
rgbd->pcl.link(rerun->inputPCL);
rgbd->rgbd.link(rerun->inputRGBD);
pipeline.start();
auto device = pipeline.getDefaultDevice();
device->setIrLaserDotProjectorIntensity(0.7);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,35 +23,30 @@ int main() {
dai::RemoteConnection remoteConnector(dai::RemoteConnection::DEFAULT_ADDRESS, webSocketPort, true, httpPort);
// Create pipeline
dai::Pipeline pipeline;
// Define sources and outputs
int fps = 30;
// Define sources and outputs
auto left = pipeline.create<dai::node::MonoCamera>();
auto right = pipeline.create<dai::node::MonoCamera>();
auto left = pipeline.create<dai::node::Camera>();
auto right = pipeline.create<dai::node::Camera>();
auto stereo = pipeline.create<dai::node::StereoDepth>();
auto rgbd = pipeline.create<dai::node::RGBD>()->build();
auto color = pipeline.create<dai::node::Camera>();
stereo->setExtendedDisparity(false);
color->build();

left->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
left->setCamera("left");
left->setFps(fps);
right->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
right->setCamera("right");
right->setFps(fps);
left->build(dai::CameraBoardSocket::LEFT);
right->build(dai::CameraBoardSocket::RIGHT);
stereo->setSubpixel(true);
stereo->setExtendedDisparity(false);
stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::DEFAULT);
stereo->setLeftRightCheck(true);
stereo->setRectifyEdgeFillColor(0); // black, to better see the cutout
stereo->enableDistortionCorrection(true);
stereo->initialConfig.setLeftRightCheckThreshold(10);
auto *out = color->requestOutput(std::pair<int, int>(1280, 720), dai::ImgFrame::Type::RGB888i);
left->out.link(stereo->left);
right->out.link(stereo->right);

auto* out = color->requestOutput(std::pair<int, int>(1280, 720), dai::ImgFrame::Type::RGB888i);

out->link(stereo->inputAlignTo);
left->requestOutput(std::pair<int, int>(1280, 720))->link(stereo->left);
right->requestOutput(std::pair<int, int>(1280, 720))->link(stereo->right);


stereo->depth.link(rgbd->inDepth);
out->link(rgbd->inColor);

Expand Down
26 changes: 9 additions & 17 deletions examples/python/HostNodes/rgbd.py → examples/python/RGBD/rgbd.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,44 +22,36 @@ def run(self):
rr.log("world", rr.ViewCoordinates.FLU)
rr.log("world/ground", rr.Boxes3D(half_sizes=[3.0, 3.0, 0.00001]))
while self.isRunning():
pclObstData = self.inputPCL.get()
if pclObstData is not None:
points, colors = pclObstData.getPointsRGB()
inPointCloud = self.inputPCL.get()
if inPointCloud is not None:
points, colors = inPointCloud.getPointsRGB()
rr.log("world/pcl", rr.Points3D(points, colors=colors, radii=[0.01]))

# Create pipeline

with dai.Pipeline() as p:
fps = 30
# Define sources and outputs
left = p.create(dai.node.MonoCamera)
right = p.create(dai.node.MonoCamera)
left = p.create(dai.node.Camera)
right = p.create(dai.node.Camera)
color = p.create(dai.node.Camera)
stereo = p.create(dai.node.StereoDepth)
rgbd = p.create(dai.node.RGBD).build()
color.build()
rerunViewer = RerunNode()
left.setCamera("left")
left.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
left.setFps(fps)
right.setCamera("right")
right.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
right.setFps(fps)
left.build(dai.CameraBoardSocket.LEFT)
right.build(dai.CameraBoardSocket.RIGHT)
out = color.requestOutput((1280,720), dai.ImgFrame.Type.RGB888i)


out.link(stereo.inputAlignTo)
stereo.setExtendedDisparity(False)
stereo.setLeftRightCheck(True)
stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.DEFAULT)
stereo.setRectifyEdgeFillColor(0)
stereo.enableDistortionCorrection(True)
stereo.initialConfig.setLeftRightCheckThreshold(10)
stereo.setSubpixel(True)

# Linking
left.out.link(stereo.left)
right.out.link(stereo.right)
left.requestOutput((1280, 720)).link(stereo.left)
right.requestOutput((1280, 720)).link(stereo.right)
stereo.depth.link(rgbd.inDepth)
out.link(rgbd.inColor)

Expand Down
80 changes: 80 additions & 0 deletions examples/python/RGBD/rgbd_o3d.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@

import time
import depthai as dai
import sys
import numpy as np

try:
import open3d as o3d
except ImportError:
sys.exit("Critical dependency missing: Open3D. Please install it using the command: '{} -m pip install open3d' and then rerun the script.".format(sys.executable))


class O3DNode(dai.node.ThreadedHostNode):
    """Host node that renders incoming RGB point clouds in an Open3D window.

    Polls its ``inputPCL`` queue and updates a single point-cloud geometry;
    pressing 'Q' in the viewer window requests shutdown of the render loop.
    """

    def __init__(self):
        dai.node.ThreadedHostNode.__init__(self)
        self.inputPCL = self.createInput()
        # Set by the viewer key callback to ask the render loop to exit.
        # (The original code set a module-level `isRunning` global that nothing
        # ever read, so the 'Q' key had no effect.)
        self.quitRequested = False

    def run(self):
        def key_callback(vis, action, mods):
            # action == 0 is key release; key code 81 corresponds to 'Q'.
            if action == 0:
                self.quitRequested = True

        vis = o3d.visualization.VisualizerWithKeyCallback()
        vis.create_window()
        vis.register_key_action_callback(81, key_callback)
        pcd = o3d.geometry.PointCloud()
        coordinateFrame = o3d.geometry.TriangleMesh.create_coordinate_frame(size=1000, origin=[0, 0, 0])
        vis.add_geometry(coordinateFrame)
        first = True
        while self.isRunning() and not self.quitRequested:
            inPointCloud = self.inputPCL.tryGet()
            if inPointCloud is not None:
                points, colors = inPointCloud.getPointsRGB()
                pcd.points = o3d.utility.Vector3dVector(points.astype(np.float64))
                # Colors arrive as 0-255 ints; Open3D expects float64 in [0, 1].
                colors = (colors.reshape(-1, 3) / 255.0).astype(np.float64)
                pcd.colors = o3d.utility.Vector3dVector(colors)
                if first:
                    # add_geometry resets the view; only do it for the first frame.
                    vis.add_geometry(pcd)
                    first = False
                else:
                    vis.update_geometry(pcd)
            vis.poll_events()
            vis.update_renderer()
        vis.destroy_window()

# Create pipeline: two mono-socket cameras feed StereoDepth, whose depth is
# aligned to the color camera and fused by the RGBD node into a point cloud
# that the Open3D viewer consumes.

with dai.Pipeline() as p:
    # Define sources and outputs
    left = p.create(dai.node.Camera)
    right = p.create(dai.node.Camera)
    color = p.create(dai.node.Camera)
    stereo = p.create(dai.node.StereoDepth)
    rgbd = p.create(dai.node.RGBD).build()
    color.build()
    o3dViewer = O3DNode()
    left.build(dai.CameraBoardSocket.LEFT)
    right.build(dai.CameraBoardSocket.RIGHT)
    out = color.requestOutput((1280, 720), dai.ImgFrame.Type.RGB888i)

    # Align depth to the color stream so RGB and depth share a viewpoint.
    out.link(stereo.inputAlignTo)
    stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.DEFAULT)
    stereo.setRectifyEdgeFillColor(0)
    stereo.enableDistortionCorrection(True)

    # Linking
    left.requestOutput((1280, 720)).link(stereo.left)
    right.requestOutput((1280, 720)).link(stereo.right)
    stereo.depth.link(rgbd.inDepth)
    out.link(rgbd.inColor)

    rgbd.pcl.link(o3dViewer.inputPCL)

    p.start()
    # Keep the main thread alive while the pipeline and viewer threads run.
    # (Removed an unused `fps = 30` local that was never applied to any node.)
    while p.isRunning():
        time.sleep(1)
Original file line number Diff line number Diff line change
Expand Up @@ -12,34 +12,26 @@
remoteConnector = dai.RemoteConnection(
webSocketPort=args.webSocketPort, httpPort=args.httpPort
)
fps = 30.0
left = p.create(dai.node.MonoCamera)
right = p.create(dai.node.MonoCamera)
left = p.create(dai.node.Camera)
right = p.create(dai.node.Camera)
color = p.create(dai.node.Camera)
stereo = p.create(dai.node.StereoDepth)
rgbd = p.create(dai.node.RGBD).build()

color.build()
left.setCamera("left")
left.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
left.setFps(fps)
right.setCamera("right")
right.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
right.setFps(fps)
left.build(dai.CameraBoardSocket.LEFT)
right.build(dai.CameraBoardSocket.RIGHT)
out = color.requestOutput((1280, 720))


out.link(stereo.inputAlignTo)
stereo.setExtendedDisparity(False)
stereo.setLeftRightCheck(True)
stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.DEFAULT)
stereo.setRectifyEdgeFillColor(0)
stereo.enableDistortionCorrection(True)
stereo.initialConfig.setLeftRightCheckThreshold(10)
stereo.setSubpixel(True)

# Linking
left.out.link(stereo.left)
right.out.link(stereo.right)
left.requestOutput((1280, 720)).link(stereo.left)
right.requestOutput((1280, 720)).link(stereo.right)
stereo.depth.link(rgbd.inDepth)
out.link(rgbd.inColor)
remoteConnector.addTopic("pcl", rgbd.pcl, "common")
Expand Down
1 change: 1 addition & 0 deletions include/depthai/pipeline/datatype/DatatypeEnum.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ enum class DatatypeEnum : std::int32_t {
TransformData,
PointCloudConfig,
PointCloudData,
RGBDData,
ImageAlignConfig,
ImgAnnotations
};
Expand Down
Loading

0 comments on commit 641df1c

Please sign in to comment.