未验证 提交 47fb79bd 编写于 作者: H hzc 提交者: GitHub

Merge pull request #22936 from hzcyf:orbbec_new_cam_support

videoio: add Orbbec Gemini 2 and Astra 2 camera support

### Test Result

| OS | Compiler | Camera | Result |
|-----|-----------|---------|--------|
|Windows11| (VS2022)MSVC17.3|Orbbec Gemini 2|Pass|
|Windows11| (VS2022)MSVC17.3|Orbbec Astra 2|Pass|
|Ubuntu22.04|GCC9.2|Orbbec Gemini 2|Pass|
|Ubuntu22.04|GCC9.2|Orbbec Astra 2|Pass|

### Pull Request Readiness Checklist
- [x] I agree to contribute to the project under Apache 2 License.
- [x] To the best of my knowledge, the proposed patch is not based on code under the GPL or another license that is incompatible with OpenCV
- [x] The PR is proposed to the proper branch
- [x] The feature is well documented and sample code can be built with the project CMake
上级 50da209d
......@@ -32,6 +32,11 @@
namespace cv {
namespace obsensor {
#define OBSENSOR_CAM_VID 0x2bc5 // usb vid
#define OBSENSOR_ASTRA2_PID 0x0660 // pid of Orbbec Astra 2 Camera
#define OBSENSOR_GEMINI2_PID 0x0670 // pid of Orbbec Gemini 2 Camera
enum StreamType
{
OBSENSOR_STREAM_IR = 1,
......@@ -45,6 +50,7 @@ enum FrameFormat
FRAME_FORMAT_YUYV = 0,
FRAME_FORMAT_MJPG = 5,
FRAME_FORMAT_Y16 = 8,
FRAME_FORMAT_Y14 = 9,
};
enum PropertyId
......@@ -93,6 +99,7 @@ public:
virtual bool getProperty(int propId, uint8_t* recvData, uint32_t* recvDataSize) = 0;
virtual StreamType streamType() const = 0;
virtual uint16_t getPid() const =0;
};
// A "StreamChannelGroup" is a group of stream channels that belong to the same physical device
......
......@@ -474,13 +474,13 @@ STDMETHODIMP MSMFStreamChannel::OnReadSample(HRESULT hrStatus, DWORD dwStreamInd
if (sample)
{
ComPtr<IMFMediaBuffer> buffer = nullptr;
DWORD max_length, current_length;
DWORD maxLength, currentLength;
byte* byte_buffer = nullptr;
HR_FAILED_EXEC(sample->GetBufferByIndex(0, &buffer), { return S_OK; });
buffer->Lock(&byte_buffer, &max_length, &current_length);
Frame fo = { currentProfile_.format, currentProfile_.width, currentProfile_.height, current_length, (uint8_t*)byte_buffer };
buffer->Lock(&byte_buffer, &maxLength, &currentLength);
Frame fo = { currentProfile_.format, currentProfile_.width, currentProfile_.height, currentLength, (uint8_t*)byte_buffer };
if (depthFrameProcessor_)
{
depthFrameProcessor_->process(&fo);
......
......@@ -41,6 +41,11 @@ const uint8_t OB_EXT_CMD2[16] = { 0x47, 0x4d, 0x04, 0x00, 0x02, 0x00, 0x56, 0x00
const uint8_t OB_EXT_CMD3[16] = { 0x47, 0x4d, 0x04, 0x00, 0x02, 0x00, 0x58, 0x00, 0x2a, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 };
const uint8_t OB_EXT_CMD4[16] = { 0x47, 0x4d, 0x02, 0x00, 0x03, 0x00, 0x60, 0x00, 0xed, 0x03, 0x00, 0x00 };
const uint8_t OB_EXT_CMD5[16] = { 0x47, 0x4d, 0x02, 0x00, 0x03, 0x00, 0x62, 0x00, 0xe9, 0x03, 0x00, 0x00 };
const uint8_t OB_EXT_CMD6[16] = { 0x47, 0x4d, 0x04, 0x00, 0x02, 0x00, 0x7c, 0x00, 0x2a, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00};
const uint8_t OB_EXT_CMD7[16] = { 0x47, 0x4d, 0x04, 0x00, 0x02, 0x00, 0xfe, 0x12, 0x55, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 };
const uint8_t OB_EXT_CMD8[16] = { 0x47, 0x4d, 0x04, 0x00, 0x02, 0x00, 0xfe, 0x13, 0x3f, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 };
const uint8_t OB_EXT_CMD9[16] = { 0x47, 0x4d, 0x04, 0x00, 0x02, 0x00, 0xfa, 0x13, 0x4b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 };
const uint8_t OB_EXT_CMD10[16] = { 0x47, 0x4d, 0x04, 0x00, 0x02, 0x00, 0xfa, 0x13, 0x3f, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00 };
#if defined(HAVE_OBSENSOR_V4L2)
#define fourCc2Int(a, b, c, d) \
......@@ -54,6 +59,7 @@ const std::map<uint32_t, FrameFormat> fourccToOBFormat = {
{fourCc2Int('Y', 'U', 'Y', '2'), FRAME_FORMAT_YUYV},
{fourCc2Int('M', 'J', 'P', 'G'), FRAME_FORMAT_MJPG},
{fourCc2Int('Y', '1', '6', ' '), FRAME_FORMAT_Y16},
{fourCc2Int('Y', '1', '4', ' '), FRAME_FORMAT_Y14},
};
StreamType parseUvcDeviceNameToStreamType(const std::string& devName)
......@@ -187,6 +193,54 @@ void DepthFrameProcessor::process(Frame* frame)
}
}
// Allocates the fixed-size output buffer that receives unpacked Y16 depth
// frames. OUT_DATA_SIZE covers the largest supported depth stream
// (1280x800 at 2 bytes per pixel); the buffer is released in the destructor.
DepthFrameUnpacker::DepthFrameUnpacker(){
    outputDataBuf_ = new uint8_t[OUT_DATA_SIZE];
}
// Releases the unpack buffer allocated in the constructor.
DepthFrameUnpacker::~DepthFrameUnpacker(){
    delete[] outputDataBuf_;
}
#define ON_BITS(count) ((1 << count) - 1)
#define CREATE_MASK(count, offset) (ON_BITS(count) << offset)
#define TAKE_BITS(source, count, offset) ((source & CREATE_MASK(count, offset)) >> offset)
// Unpack a packed 14-bit depth frame (FRAME_FORMAT_Y14) into 16-bit pixels.
// Every 28 source bytes encode 16 pixels, laid out as four identical
// 7-byte -> 4-pixel groups. The result is written to the internally owned
// buffer and the frame is redirected to it as FRAME_FORMAT_Y16.
void DepthFrameUnpacker::process(Frame *frame){
    const uint8_t tarStep = 16;
    const uint8_t srcStep = 28;
    uint16_t *tar = (uint16_t *)outputDataBuf_;
    uint8_t *src = frame->data;
    uint32_t pixelSize = frame->width * frame->height;
    for(uint32_t i = 0; i < pixelSize; i += tarStep) {
        // Four repetitions of the same 7-byte / 4-pixel bit layout.
        for(uint32_t grp = 0; grp < 4; grp++) {
            const uint8_t *s = src + grp * 7;
            uint16_t *t = tar + grp * 4;
            t[0] = (TAKE_BITS(s[0], 8, 0) << 6) | TAKE_BITS(s[1], 6, 2);
            t[1] = (TAKE_BITS(s[1], 2, 0) << 12) | (TAKE_BITS(s[2], 8, 0) << 4) | TAKE_BITS(s[3], 4, 4);
            t[2] = (TAKE_BITS(s[3], 4, 0) << 10) | (TAKE_BITS(s[4], 8, 0) << 2) | TAKE_BITS(s[5], 2, 6);
            t[3] = (TAKE_BITS(s[5], 6, 0) << 8) | TAKE_BITS(s[6], 8, 0);
        }
        src += srcStep;
        tar += tarStep;
    }
    frame->data = outputDataBuf_;
    frame->format = FRAME_FORMAT_Y16;
    // NOTE(review): frame->dataSize is left at the packed (Y14) size;
    // depth consumers appear to use width/height instead — confirm before
    // relying on dataSize downstream.
}
IUvcStreamChannel::IUvcStreamChannel(const UvcDeviceInfo& devInfo) :
devInfo_(devInfo),
streamType_(parseUvcDeviceNameToStreamType(devInfo_.name))
......@@ -198,6 +252,10 @@ StreamType IUvcStreamChannel::streamType() const {
return streamType_;
}
// Returns the USB product id of the underlying UVC device, used by callers
// to apply camera-model-specific handling (e.g. Gemini 2 / Astra 2 paths).
// Fix: dropped the stray semicolon after the function body (it is an empty
// declaration and triggers -Wextra-semi / -pedantic warnings).
uint16_t IUvcStreamChannel::getPid() const {
    return devInfo_.pid;
}
bool IUvcStreamChannel::setProperty(int propId, const uint8_t* /*data*/, uint32_t /*dataSize*/)
{
uint8_t* rcvData;
......@@ -206,15 +264,28 @@ bool IUvcStreamChannel::setProperty(int propId, const uint8_t* /*data*/, uint32_
switch (propId)
{
case DEPTH_TO_COLOR_ALIGN:
// todo: value filling
rst &= setXu(2, OB_EXT_CMD0, sizeof(OB_EXT_CMD0));
rst &= getXu(2, &rcvData, &rcvLen);
rst &= setXu(2, OB_EXT_CMD1, sizeof(OB_EXT_CMD1));
rst &= getXu(2, &rcvData, &rcvLen);
rst &= setXu(2, OB_EXT_CMD2, sizeof(OB_EXT_CMD2));
rst &= getXu(2, &rcvData, &rcvLen);
rst &= setXu(2, OB_EXT_CMD3, sizeof(OB_EXT_CMD3));
rst &= getXu(2, &rcvData, &rcvLen);
if(OBSENSOR_GEMINI2_PID == devInfo_.pid ){
rst &= setXu(2, OB_EXT_CMD8, sizeof(OB_EXT_CMD8));
rst &= getXu(2, &rcvData, &rcvLen);
rst &= setXu(2, OB_EXT_CMD6, sizeof(OB_EXT_CMD6));
rst &= getXu(2, &rcvData, &rcvLen);
}
else if(OBSENSOR_ASTRA2_PID == devInfo_.pid ){
rst &= setXu(2, OB_EXT_CMD10, sizeof(OB_EXT_CMD8));
rst &= getXu(2, &rcvData, &rcvLen);
rst &= setXu(2, OB_EXT_CMD6, sizeof(OB_EXT_CMD6));
rst &= getXu(2, &rcvData, &rcvLen);
}
else{
rst &= setXu(2, OB_EXT_CMD0, sizeof(OB_EXT_CMD0));
rst &= getXu(2, &rcvData, &rcvLen);
rst &= setXu(2, OB_EXT_CMD1, sizeof(OB_EXT_CMD1));
rst &= getXu(2, &rcvData, &rcvLen);
rst &= setXu(2, OB_EXT_CMD2, sizeof(OB_EXT_CMD2));
rst &= getXu(2, &rcvData, &rcvLen);
rst &= setXu(2, OB_EXT_CMD3, sizeof(OB_EXT_CMD3));
rst &= getXu(2, &rcvData, &rcvLen);
}
break;
default:
rst = false;
......@@ -231,12 +302,50 @@ bool IUvcStreamChannel::getProperty(int propId, uint8_t* recvData, uint32_t* rec
switch (propId)
{
case CAMERA_PARAM:
rst &= setXu(2, OB_EXT_CMD5, sizeof(OB_EXT_CMD5));
rst &= getXu(2, &rcvData, &rcvLen);
if (rst && OB_EXT_CMD5[6] == rcvData[6] && rcvData[8] == 0 && rcvData[9] == 0)
{
memcpy(recvData, rcvData + 10, rcvLen - 10);
*recvDataSize = rcvLen - 10;
if(OBSENSOR_GEMINI2_PID == devInfo_.pid){
// return default param
CameraParam param;
param.p0[0] = 516.652f;
param.p0[1] = 516.692f;
param.p0[2] = 322.988f;
param.p0[3] = 235.787f;
param.p1[0] = 516.652f;
param.p1[1] = 516.692f;
param.p1[2] = 322.988f;
param.p1[3] = 235.787f;
param.p6[0] = 640;
param.p6[1] = 480;
param.p7[0] = 640;
param.p7[1] = 480;
*recvDataSize = sizeof(CameraParam);
memcpy(recvData, &param, *recvDataSize);
}
else if(OBSENSOR_ASTRA2_PID == devInfo_.pid){
// return default param
CameraParam param;
param.p0[0] = 558.151f;
param.p0[1] = 558.003f;
param.p0[2] = 312.546f;
param.p0[3] = 241.169f;
param.p1[0] = 558.151f;
param.p1[1] = 558.003f;
param.p1[2] = 312.546f;
param.p1[3] = 241.169f;
param.p6[0] = 640;
param.p6[1] = 480;
param.p7[0] = 640;
param.p7[1] = 480;
*recvDataSize = sizeof(CameraParam);
memcpy(recvData, &param, *recvDataSize);
}
else{
rst &= setXu(2, OB_EXT_CMD5, sizeof(OB_EXT_CMD5));
rst &= getXu(2, &rcvData, &rcvLen);
if (rst && OB_EXT_CMD5[6] == rcvData[6] && rcvData[8] == 0 && rcvData[9] == 0)
{
memcpy(recvData, rcvData + 10, rcvLen - 10);
*recvDataSize = rcvLen - 10;
}
}
break;
default:
......@@ -249,7 +358,20 @@ bool IUvcStreamChannel::getProperty(int propId, uint8_t* recvData, uint32_t* rec
bool IUvcStreamChannel::initDepthFrameProcessor()
{
if (streamType_ == OBSENSOR_STREAM_DEPTH && setXu(2, OB_EXT_CMD4, sizeof(OB_EXT_CMD4)))
if(OBSENSOR_GEMINI2_PID == devInfo_.pid || OBSENSOR_ASTRA2_PID == devInfo_.pid){
uint8_t* rcvData;
uint32_t rcvLen;
setXu(2, OB_EXT_CMD7, sizeof(OB_EXT_CMD7));
getXu(2, &rcvData, &rcvLen);
setXu(2, OB_EXT_CMD9, sizeof(OB_EXT_CMD9));
getXu(2, &rcvData, &rcvLen);
depthFrameProcessor_ = makePtr<DepthFrameUnpacker>();
return true;
}
else if (streamType_ == OBSENSOR_STREAM_DEPTH && setXu(2, OB_EXT_CMD4, sizeof(OB_EXT_CMD4)))
{
uint8_t* rcvData;
uint32_t rcvLen;
......
......@@ -26,8 +26,6 @@
#ifdef HAVE_OBSENSOR
namespace cv {
namespace obsensor {
#define OBSENSOR_CAM_VID 0x2bc5 // usb vid
#define XU_MAX_DATA_LENGTH 1024
#define XU_UNIT_ID 4
......@@ -60,17 +58,34 @@ struct OBExtensionParam {
float ps;
};
class DepthFrameProcessor {
// Abstract interface for per-frame post-processing applied to raw frames
// delivered by a stream channel (e.g. depth unpacking or depth-value mapping).
class IFrameProcessor{
public:
    // Transforms the frame in place; implementations may redirect frame->data
    // to an internally owned buffer.
    virtual void process(Frame* frame) = 0;
    virtual ~IFrameProcessor() = default;
};
// Depth post-processor that maps raw depth values through a lookup table
// built from the device's extension parameters.
class DepthFrameProcessor: public IFrameProcessor {
public:
    // parma: extension parameters read from the device ("parma" spelling is
    // kept to match the out-of-line definition).
    DepthFrameProcessor(const OBExtensionParam& parma);
    virtual ~DepthFrameProcessor() noexcept;
    virtual void process(Frame* frame) override;

private:
    const OBExtensionParam param_;
    uint16_t* lookUpTable_; // value-mapping table, owned by this object
};
// Depth post-processor that unpacks packed 14-bit (Y14) depth frames into
// 16-bit (Y16) frames for downstream consumption.
class DepthFrameUnpacker: public IFrameProcessor {
public:
    DepthFrameUnpacker();
    virtual ~DepthFrameUnpacker() noexcept;
    virtual void process(Frame* frame) override;
private:
    // Output buffer size: largest supported depth stream, 1280x800 at
    // 2 bytes per pixel.
    const uint32_t OUT_DATA_SIZE = 1280*800*2;
    uint8_t *outputDataBuf_; // owned; allocated in ctor, freed in dtor
};
class IUvcStreamChannel : public IStreamChannel {
public:
IUvcStreamChannel(const UvcDeviceInfo& devInfo);
......@@ -79,6 +94,7 @@ public:
virtual bool setProperty(int propId, const uint8_t* data, uint32_t dataSize) override;
virtual bool getProperty(int propId, uint8_t* recvData, uint32_t* recvDataSize) override;
virtual StreamType streamType() const override;
virtual uint16_t getPid() const override;
protected:
virtual bool setXu(uint8_t ctrl, const uint8_t* data, uint32_t len) = 0;
......@@ -89,7 +105,7 @@ protected:
protected:
const UvcDeviceInfo devInfo_;
StreamType streamType_;
Ptr<DepthFrameProcessor> depthFrameProcessor_;
Ptr<IFrameProcessor> depthFrameProcessor_;
};
}} // namespace cv::obsensor::
#endif // HAVE_OBSENSOR
......
......@@ -34,6 +34,8 @@ VideoCapture_obsensor::VideoCapture_obsensor(int index) : isOpened_(false)
{
static const obsensor::StreamProfile colorProfile = { 640, 480, 30, obsensor::FRAME_FORMAT_MJPG };
static const obsensor::StreamProfile depthProfile = {640, 480, 30, obsensor::FRAME_FORMAT_Y16};
static const obsensor::StreamProfile gemini2depthProfile = {1280, 800, 30, obsensor::FRAME_FORMAT_Y14};
static const obsensor::StreamProfile astra2depthProfile = {640, 480, 30, obsensor::FRAME_FORMAT_Y14};
streamChannelGroup_ = obsensor::getStreamChannelGroup(index);
if (!streamChannelGroup_.empty())
......@@ -47,15 +49,27 @@ VideoCapture_obsensor::VideoCapture_obsensor(int index) : isOpened_(false)
channel->start(colorProfile, [&](obsensor::Frame* frame) {
std::unique_lock<std::mutex> lk(frameMutex_);
colorFrame_ = Mat(1, frame->dataSize, CV_8UC1, frame->data).clone();
frameCv_.notify_all();
});
break;
case obsensor::OBSENSOR_STREAM_DEPTH:
{
uint8_t data = 1;
channel->setProperty(obsensor::DEPTH_TO_COLOR_ALIGN, &data, 1);
channel->start(depthProfile, [&](obsensor::Frame* frame) {
obsensor::StreamProfile profile = depthProfile;
if(OBSENSOR_GEMINI2_PID == channel->getPid()){
profile = gemini2depthProfile;
}
else if(OBSENSOR_ASTRA2_PID == channel->getPid()){
profile = astra2depthProfile;
}
channel->start(profile, [&](obsensor::Frame* frame) {
std::unique_lock<std::mutex> lk(frameMutex_);
depthFrame_ = Mat(frame->height, frame->width, CV_16UC1, frame->data, frame->width * 2).clone();
frameCv_.notify_all();
});
uint32_t len;
......@@ -72,10 +86,21 @@ VideoCapture_obsensor::VideoCapture_obsensor(int index) : isOpened_(false)
}
}
// Stop every stream channel before this object is destroyed so the channels'
// frame callbacks can no longer touch members that are about to be freed.
VideoCapture_obsensor::~VideoCapture_obsensor(){
    for (size_t idx = 0; idx < streamChannelGroup_.size(); ++idx)
    {
        streamChannelGroup_[idx]->stop();
    }
    streamChannelGroup_.clear();
}
bool VideoCapture_obsensor::grabFrame()
{
std::unique_lock<std::mutex> lk(frameMutex_);
// Try waiting for 33 milliseconds to ensure that both depth and color frame have been received!
frameCv_.wait_for(lk, std::chrono::milliseconds(33), [&](){ return !depthFrame_.empty() && !colorFrame_.empty(); });
grabbedDepthFrame_ = depthFrame_;
grabbedColorFrame_ = colorFrame_;
......@@ -93,7 +118,18 @@ bool VideoCapture_obsensor::retrieveFrame(int outputType, OutputArray frame)
case CAP_OBSENSOR_DEPTH_MAP:
if (!grabbedDepthFrame_.empty())
{
grabbedDepthFrame_.copyTo(frame);
if(OBSENSOR_GEMINI2_PID == streamChannelGroup_.front()->getPid()){
grabbedDepthFrame_ = grabbedDepthFrame_*0.8;
Rect rect(320, 160, 640, 480);
grabbedDepthFrame_(rect).copyTo(frame);
}
else if(OBSENSOR_ASTRA2_PID == streamChannelGroup_.front()->getPid()){
grabbedDepthFrame_ = grabbedDepthFrame_*0.8;
grabbedDepthFrame_.copyTo(frame);
}
else{
grabbedDepthFrame_.copyTo(frame);
}
grabbedDepthFrame_.release();
return true;
}
......
......@@ -24,6 +24,7 @@
#include <map>
#include <mutex>
#include <condition_variable>
#include "cap_obsensor/obsensor_stream_channel_interface.hpp"
......@@ -33,7 +34,7 @@ class VideoCapture_obsensor : public IVideoCapture
{
public:
VideoCapture_obsensor(int index);
virtual ~VideoCapture_obsensor() {}
virtual ~VideoCapture_obsensor();
virtual double getProperty(int propIdx) const CV_OVERRIDE;
virtual bool setProperty(int propIdx, double propVal) CV_OVERRIDE;
......@@ -51,6 +52,7 @@ private:
std::vector<Ptr<obsensor::IStreamChannel>> streamChannelGroup_;
std::mutex frameMutex_;
std::condition_variable frameCv_;
Mat depthFrame_;
Mat colorFrame_;
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册