diff --git a/src/capturer/libargus_egl_capturer.cpp b/src/capturer/libargus_egl_capturer.cpp
index 701be06..e07df52 100644
--- a/src/capturer/libargus_egl_capturer.cpp
+++ b/src/capturer/libargus_egl_capturer.cpp
@@ -18,7 +18,7 @@ LibargusEglCapturer::LibargusEglCapturer(Args args)
     : camera_id_(args.camera_id),
       num_streams_(args.num_streams),
       fps_(args.fps),
-      format_(args.format),
+      format_(V4L2_PIX_FMT_NV12),
       config_(args) {}
 
 LibargusEglCapturer::~LibargusEglCapturer() {
@@ -267,7 +267,7 @@ int LibargusEglCapturer::height(int stream_idx) const {
   return stream_handlers_[stream_idx]->height();
 }
 
-bool LibargusEglCapturer::is_dma_capture() const { return false; }
+bool LibargusEglCapturer::is_dma_capture() const { return true; }
 
 uint32_t LibargusEglCapturer::format() const { return format_; }
 
@@ -290,8 +290,8 @@ void StreamHandler::CaptureImage() {
     return;
   }
   if (dma_fd_ == -1) {
-    dma_fd_ =
-        native_buffer->createNvBuffer(size_, NVBUF_COLOR_FORMAT_YUV420, NVBUF_LAYOUT_PITCH);
+    dma_fd_ = native_buffer->createNvBuffer(size_, NVBUF_COLOR_FORMAT_NV12,
+                                            NVBUF_LAYOUT_BLOCK_LINEAR);
     if (dma_fd_ < 0) {
       return;
     }
@@ -305,31 +305,6 @@ void StreamHandler::CaptureImage() {
   frame_buffer_->SetDmaFd(dma_fd_);
   frame_buffer_->SetTimestamp(timestamp);
 
-  NvBufSurface *nvbuf = nullptr;
-  if (NvBufSurfaceFromFd(dma_fd_, reinterpret_cast<void **>(&nvbuf)) != 0) {
-    DestroyNvBufferFromFd();
-    return;
-  }
-
-  auto &surf = nvbuf->surfaceList[0];
-  int offset = 0;
-  for (int p = 0; p < surf.planeParams.num_planes; ++p) {
-    if (NvBufSurfaceMap(nvbuf, 0, p, NVBUF_MAP_READ) != 0)
-      continue;
-    if (NvBufSurfaceSyncForCpu(nvbuf, 0, p) != 0) {
-      NvBufSurfaceUnMap(nvbuf, 0, p);
-      continue;
-    }
-    uint8_t *addr = static_cast<uint8_t *>(surf.mappedAddr.addr[p]);
-    for (uint32_t row = 0; row < surf.planeParams.height[p]; ++row) {
-      int row_size = surf.planeParams.width[p] * surf.planeParams.bytesPerPix[p];
-      memcpy(frame_buffer_->MutableData() + offset, addr + row * surf.planeParams.pitch[p],
-             row_size);
-      offset += row_size;
-    }
-    NvBufSurfaceUnMap(nvbuf, 0, p);
-  }
-
   Next(frame_buffer_);
 }
diff --git a/src/common/CMakeLists.txt b/src/common/CMakeLists.txt
index 58880e9..c7a2511 100644
--- a/src/common/CMakeLists.txt
+++ b/src/common/CMakeLists.txt
@@ -3,10 +3,30 @@ project(common)
 find_package(JPEG REQUIRED)
 include_directories(${JPEG_INCLUDE_DIR})
 
-aux_source_directory(${PROJECT_SOURCE_DIR} COMMON_FILES)
+set(COMMON_FILES
+    ${PROJECT_SOURCE_DIR}/logging.cpp
+    ${PROJECT_SOURCE_DIR}/v4l2_frame_buffer.cpp
+    ${PROJECT_SOURCE_DIR}/utils.cpp
+    ${PROJECT_SOURCE_DIR}/v4l2_utils.cpp
+    ${PROJECT_SOURCE_DIR}/worker.cpp
+)
+
+if(JETSON_PLATFORM)
+    set(MULTIMEDIA_API_BASE /usr/src/jetson_multimedia_api)
+
+    list(APPEND COMMON_FILES
+        ${PROJECT_SOURCE_DIR}/nv_utils.cpp
+    )
+endif()
 
 add_library(${PROJECT_NAME} ${COMMON_FILES})
 
+if(JETSON_PLATFORM)
+    target_include_directories(${PROJECT_NAME} PUBLIC
+        ${MULTIMEDIA_API_BASE}/include
+    )
+endif()
+
 # Use libyuv in libwebrtc.a
 target_link_libraries(${PROJECT_NAME}
     avutil
diff --git a/src/common/nv_utils.cpp b/src/common/nv_utils.cpp
new file mode 100644
index 0000000..2bad517
--- /dev/null
+++ b/src/common/nv_utils.cpp
@@ -0,0 +1,85 @@
+#include "nv_utils.h"
+
+#include <NvBufSurface.h>
+#include <cstdio>
+#include <cstring>
+
+int NvUtils::ConvertToI420(int src_dma_fd, uint8_t *dst_addr, size_t dst_size, int width,
+                           int height) {
+  // Allocate a temporary pitch-linear YUV420 surface as the transform destination.
+  int dst_dma_fd;
+  NvBufSurf::NvCommonAllocateParams cParams;
+  cParams.width = width;
+  cParams.height = height;
+  cParams.layout = NVBUF_LAYOUT_PITCH;
+  cParams.colorFormat = NVBUF_COLOR_FORMAT_YUV420;
+  cParams.memtag = NvBufSurfaceTag_CAMERA;
+  cParams.memType = NVBUF_MEM_SURFACE_ARRAY;
+
+  int ret = NvBufSurf::NvAllocate(&cParams, 1, &dst_dma_fd);
+  if (ret < 0) {
+    return ret;
+  }
+
+  // Have the VIC convert the source surface (block-linear NV12 from the capturer)
+  // into the pitch-linear YUV420 destination.
+  NvBufSurf::NvCommonTransformParams transform_params;
+  memset(&transform_params, 0, sizeof(transform_params));
+  transform_params.src_top = 0;
+  transform_params.src_left = 0;
+  transform_params.src_width = width;
+  transform_params.src_height = height;
+  transform_params.dst_top = 0;
+  transform_params.dst_left = 0;
+  transform_params.dst_width = width;
+  transform_params.dst_height = height;
+  transform_params.flag = NVBUFSURF_TRANSFORM_FILTER;
+  transform_params.flip = NvBufSurfTransform_None;
+  transform_params.filter = NvBufSurfTransformInter_Algo3;
+
+  ret = NvBufSurf::NvTransform(&transform_params, src_dma_fd, dst_dma_fd);
+  if (ret < 0) {
+    NvBufSurf::NvDestroy(dst_dma_fd);
+    return ret;
+  }
+
+  // Copy the converted planes into the caller-provided CPU buffer.
+  ret = ReadDmaBuffer(dst_dma_fd, dst_addr, dst_size);
+
+  NvBufSurf::NvDestroy(dst_dma_fd);
+
+  return ret;
+}
+
+int NvUtils::ReadDmaBuffer(int src_dma_fd, uint8_t *dst_addr, size_t dst_size) {
+  if (src_dma_fd <= 0)
+    return -1;
+
+  int ret = -1;
+
+  NvBufSurface *nvbuf_surf = 0;
+  ret = NvBufSurfaceFromFd(src_dma_fd, (void **)(&nvbuf_surf));
+  if (ret != 0) {
+    return -1;
+  }
+
+  int offset = 0;
+
+  // Map each plane, sync it for CPU access, and copy it row by row so that the
+  // hardware pitch padding is dropped from the destination buffer.
+  for (int plane = 0; plane < nvbuf_surf->surfaceList->planeParams.num_planes; ++plane) {
+    NvBufSurfaceMap(nvbuf_surf, 0, plane, NVBUF_MAP_READ);
+    NvBufSurfaceSyncForCpu(nvbuf_surf, 0, plane);
+
+    uint8_t *src_addr = static_cast<uint8_t *>(nvbuf_surf->surfaceList->mappedAddr.addr[plane]);
+    int row_size = nvbuf_surf->surfaceList->planeParams.width[plane] *
+                   nvbuf_surf->surfaceList->planeParams.bytesPerPix[plane];
+
+    for (uint row = 0; row < nvbuf_surf->surfaceList->planeParams.height[plane]; ++row) {
+      memcpy(dst_addr + offset,
+             src_addr + row * nvbuf_surf->surfaceList->planeParams.pitch[plane], row_size);
+      offset += row_size;
+    }
+
+    NvBufSurfaceSyncForDevice(nvbuf_surf, 0, plane);
+    ret = NvBufSurfaceUnMap(nvbuf_surf, 0, plane);
+    if (ret < 0) {
+      printf("Error while Unmapping buffer\n");
+      return ret;
+    }
+  }
+
+  return 0;
+}
diff --git a/src/common/nv_utils.h b/src/common/nv_utils.h
new file mode 100644
index 0000000..3ce57d5
--- /dev/null
+++ b/src/common/nv_utils.h
@@ -0,0 +1,14 @@
+#ifndef NV_UTILS_
+#define NV_UTILS_
+
+#include <cstddef>
+#include <cstdint>
+
+class NvUtils {
+ public:
+  static int ConvertToI420(int src_dma_fd, uint8_t *dst_addr, size_t dst_size, int width,
+                           int height);
+  static int ReadDmaBuffer(int src_dma_fd, uint8_t *dst_addr, size_t dst_size);
+};
+
+#endif  // NV_UTILS_
diff --git a/src/common/v4l2_frame_buffer.cpp b/src/common/v4l2_frame_buffer.cpp
index 2ac7de3..ead5b67 100644
--- a/src/common/v4l2_frame_buffer.cpp
+++ b/src/common/v4l2_frame_buffer.cpp
@@ -2,6 +2,9 @@
 #include "common/logging.h"
 
 #include 
+#if defined(USE_LIBARGUS_CAPTURE)
+#include "common/nv_utils.h"
+#endif
 
 #include 
 
@@ -18,62 +21,61 @@ rtc::scoped_refptr<V4L2FrameBuffer> V4L2FrameBuffer::Create(int width, int heigh
   return rtc::make_ref_counted<V4L2FrameBuffer>(width, height, buffer);
 }
 
-V4L2FrameBuffer::V4L2FrameBuffer(int width, int height, V4L2Buffer buffer)
-    : width_(width),
-      height_(height),
-      format_(buffer.pix_fmt),
-      size_(buffer.length),
-      flags_(buffer.flags),
-      timestamp_(buffer.timestamp),
-      buffer_(buffer),
-      has_mutable_data_(false),
-      data_(nullptr) {}
-
-V4L2FrameBuffer::V4L2FrameBuffer(int width, int height, int size, uint32_t format)
+V4L2FrameBuffer::V4L2FrameBuffer(int width, int height, uint32_t format, int size, uint32_t flags,
+                                 timeval timestamp)
     : width_(width),
       height_(height),
       format_(format),
       size_(size),
-      flags_(0),
-      timestamp_({0, 0}),
+      flags_(flags),
+      timestamp_(timestamp),
       buffer_({}),
-      has_mutable_data_(true),
-      data_(static_cast<uint8_t *>(webrtc::AlignedMalloc(size_, kBufferAlignment))) {}
+      data_(nullptr) {}
+
+V4L2FrameBuffer::V4L2FrameBuffer(int width, int height, V4L2Buffer buffer)
+    : V4L2FrameBuffer(width, height, buffer.pix_fmt, buffer.length, buffer.flags,
+                      buffer.timestamp) {
+  buffer_ = buffer;
+}
+
+V4L2FrameBuffer::V4L2FrameBuffer(int width, int height, int size, uint32_t format)
+    : V4L2FrameBuffer(width, height, format, size, 0, {0, 0}) {
+  data_.reset(static_cast<uint8_t *>(webrtc::AlignedMalloc(size_, kBufferAlignment)));
+}
 
 V4L2FrameBuffer::~V4L2FrameBuffer() {}
 
 webrtc::VideoFrameBuffer::Type V4L2FrameBuffer::type() const { return Type::kNative; }
 
 int V4L2FrameBuffer::width() const { return width_; }
-
 int V4L2FrameBuffer::height() const { return height_; }
-
 uint32_t V4L2FrameBuffer::format() const { return format_; }
-
 uint32_t V4L2FrameBuffer::size() const { return size_; }
-
 uint32_t V4L2FrameBuffer::flags() const { return flags_; }
-
 timeval V4L2FrameBuffer::timestamp() const { return timestamp_; }
 
 rtc::scoped_refptr<webrtc::I420BufferInterface> V4L2FrameBuffer::ToI420() {
   rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer(webrtc::I420Buffer::Create(width_, height_));
   i420_buffer->InitializeData();
 
+  const uint8_t *src = static_cast<const uint8_t *>(Data());
+
   if (format_ == V4L2_PIX_FMT_YUV420) {
-    memcpy(i420_buffer->MutableDataY(),
-           has_mutable_data_ ? data_.get() : (uint8_t *)buffer_.start, size_);
-  } else if (format_ == V4L2_PIX_FMT_H264) {
-    // use hw decoded frame from track.
+    memcpy(i420_buffer->MutableDataY(), src, size_);
   } else {
-    if (libyuv::ConvertToI420(has_mutable_data_ ? data_.get() : (uint8_t *)buffer_.start, size_,
-                              i420_buffer.get()->MutableDataY(), i420_buffer.get()->StrideY(),
-                              i420_buffer.get()->MutableDataU(), i420_buffer.get()->StrideU(),
-                              i420_buffer.get()->MutableDataV(), i420_buffer.get()->StrideV(),
-                              0, 0, width_, height_, width_, height_, libyuv::kRotate0,
-                              format_) < 0) {
+#if defined(USE_LIBARGUS_CAPTURE)
+    if (NvUtils::ConvertToI420(buffer_.dmafd, i420_buffer->MutableDataY(), size_, width_,
+                               height_) < 0) {
+      ERROR_PRINT("NvUtils ConvertToI420 Failed");
+    }
+#else
+    if (libyuv::ConvertToI420(src, size_, i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+                              i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+                              i420_buffer->MutableDataV(), i420_buffer->StrideV(), 0, 0, width_,
+                              height_, width_, height_, libyuv::kRotate0, format_) < 0) {
       ERROR_PRINT("libyuv ConvertToI420 Failed");
     }
+#endif
   }
 
   return i420_buffer;
@@ -81,15 +83,10 @@ rtc::scoped_refptr<webrtc::I420BufferInterface> V4L2FrameBuffer::ToI420() {
 
 V4L2Buffer V4L2FrameBuffer::GetRawBuffer() { return buffer_; }
 
-const void *V4L2FrameBuffer::Data() const {
-  if (has_mutable_data_) {
-    return data_.get();
-  }
-  return buffer_.start;
-}
+const void *V4L2FrameBuffer::Data() const { return data_ ? data_.get() : buffer_.start; }
 
 uint8_t *V4L2FrameBuffer::MutableData() {
-  if (!has_mutable_data_) {
+  if (!data_) {
     throw std::runtime_error(
         "MutableData() is not supported for frames directly created from V4L2 buffers. Use "
         "Clone() to create an owning (writable) copy before calling MutableData().");
@@ -99,26 +96,19 @@ uint8_t *V4L2FrameBuffer::MutableData() {
 
 int V4L2FrameBuffer::GetDmaFd() const { return buffer_.dmafd; }
 
-bool V4L2FrameBuffer::SetDmaFd(int fd) {
-  if (fd <= 0) {
-    return false;
+void V4L2FrameBuffer::SetDmaFd(int fd) {
+  if (fd > 0) {
+    buffer_.dmafd = fd;
   }
-
-  buffer_.dmafd = fd;
-  return true;
 }
 
 void V4L2FrameBuffer::SetTimestamp(timeval timestamp) { timestamp_ = timestamp; }
 
-/* Return a new refptr with copied metadata and frame data. */
 rtc::scoped_refptr<V4L2FrameBuffer> V4L2FrameBuffer::Clone() const {
   auto clone = rtc::make_ref_counted<V4L2FrameBuffer>(width_, height_, size_, format_);
-  if (has_mutable_data_) {
-    memcpy(clone->data_.get(), data_.get(), size_);
-  } else {
-    memcpy(clone->data_.get(), buffer_.start, size_);
-  }
+  memcpy(clone->MutableData(), Data(), size_);
+  clone->SetDmaFd(buffer_.dmafd);
 
   clone->flags_ = flags_;
   clone->timestamp_ = timestamp_;
diff --git a/src/common/v4l2_frame_buffer.h b/src/common/v4l2_frame_buffer.h
index c00ad3e..cce4502 100644
--- a/src/common/v4l2_frame_buffer.h
+++ b/src/common/v4l2_frame_buffer.h
@@ -31,7 +31,7 @@ class V4L2FrameBuffer : public webrtc::VideoFrameBuffer {
   uint8_t *MutableData();
   V4L2Buffer GetRawBuffer();
   int GetDmaFd() const;
-  bool SetDmaFd(int fd);
+  void SetDmaFd(int fd);
   void SetTimestamp(timeval timestamp);
   rtc::scoped_refptr<V4L2FrameBuffer> Clone() const;
 
@@ -46,10 +46,12 @@ class V4L2FrameBuffer : public webrtc::VideoFrameBuffer {
   const uint32_t format_;
   uint32_t size_;
   uint32_t flags_;
-  bool has_mutable_data_;
   timeval timestamp_;
   V4L2Buffer buffer_;
-  const std::unique_ptr<uint8_t, webrtc::AlignedFreeDeleter> data_;
+  std::unique_ptr<uint8_t, webrtc::AlignedFreeDeleter> data_;
+
+  V4L2FrameBuffer(int width, int height, uint32_t format, int size, uint32_t flags,
+                  timeval timestamp);
 };
 
 #endif  // V4L2_FRAME_BUFFER_H_
diff --git a/src/common/v4l2_utils.h b/src/common/v4l2_utils.h
index e46a7eb..212eb6c 100644
--- a/src/common/v4l2_utils.h
+++ b/src/common/v4l2_utils.h
@@ -10,47 +10,38 @@
 /* Save single-plane data with stride equal to width */
 struct V4L2Buffer {
   void *start = nullptr;
-  uint32_t pix_fmt;
-  uint32_t length;
+  uint32_t pix_fmt = 0;
+  uint32_t length = 0;
   uint32_t flags = 0;
   int dmafd = -1;
   struct timeval timestamp = {0, 0};
-  struct v4l2_buffer inner;
+  struct v4l2_buffer inner = {};
   struct v4l2_plane plane[VIDEO_MAX_PLANES];
 
   V4L2Buffer() = default;
+  V4L2Buffer(void *data, uint32_t fmt, uint32_t len, int fd, uint32_t f, timeval ts)
+      : start(data),
+        pix_fmt(fmt),
+        length(len),
+        dmafd(fd),
+        flags(f),
+        timestamp(ts) {}
 
   static V4L2Buffer FromV4L2(void *start, const v4l2_buffer &v4l2, uint32_t fmt) {
-    V4L2Buffer buf;
-    buf.start = start;
-    buf.pix_fmt = fmt;
-    buf.flags = v4l2.flags;
-    buf.length = v4l2.bytesused;
-    buf.timestamp = v4l2.timestamp;
+    V4L2Buffer buf(start, fmt, v4l2.bytesused, -1, v4l2.flags, v4l2.timestamp);
     buf.inner = v4l2;
     return buf;
  }
 
   static V4L2Buffer FromLibcamera(void *start, int length, int dmafd, timeval timestamp,
                                   uint32_t fmt) {
-    V4L2Buffer buf;
-    buf.start = start;
-    buf.dmafd = dmafd;
-    buf.pix_fmt = fmt;
-    buf.length = length;
-    buf.timestamp = timestamp;
-    return buf;
+    return V4L2Buffer(start, fmt, length, dmafd, 0, timestamp);
   }
 
   static V4L2Buffer FromCapturedPlane(void *start, uint32_t bytesused, int dmafd, uint32_t flags,
-                                      uint32_t pix_fmt) {
-    V4L2Buffer buf;
-    buf.start = start;
-    buf.dmafd = dmafd;
-    buf.pix_fmt = pix_fmt;
-    buf.length = bytesused;
-    buf.flags = flags;
-    return buf;
+                                      uint32_t fmt) {
+    return V4L2Buffer(start, fmt, bytesused, dmafd, flags, {0, 0});
   }
 };
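Note (not part of the patch): a minimal sketch of how the DMA path is expected to be exercised after this change, assuming the existing `V4L2FrameBuffer(width, height, size, format)` constructor remains reachable through `rtc::make_ref_counted`. The `ConvertCapturedFrame` helper, the 1280x720 resolution, and the buffer size below are illustrative only.

```cpp
// Sketch only: a capturer-style caller hands an NvBuffer DMA fd to a
// V4L2FrameBuffer and lets ToI420() perform the VIC transform + CPU copy lazily.
#include <linux/videodev2.h>

#include "common/v4l2_frame_buffer.h"

rtc::scoped_refptr<webrtc::I420BufferInterface> ConvertCapturedFrame(int nv_dma_fd, timeval ts) {
  // Hypothetical 1280x720 NV12 frame; size = width * height * 3 / 2.
  auto frame = rtc::make_ref_counted<V4L2FrameBuffer>(1280, 720, 1280 * 720 * 3 / 2,
                                                      V4L2_PIX_FMT_NV12);
  frame->SetDmaFd(nv_dma_fd);  // block-linear NvBuffer owned by the capturer
  frame->SetTimestamp(ts);
  // With USE_LIBARGUS_CAPTURE defined, the non-YUV420 branch of ToI420() routes
  // through NvUtils::ConvertToI420, which transforms to pitch-linear YUV420 on
  // the VIC before copying the planes back to the CPU.
  return frame->ToI420();
}
```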