diff --git a/CMakeLists.txt b/CMakeLists.txt index 01f734de..2710f872 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -577,13 +577,18 @@ elseif (SORA_TARGET_OS STREQUAL "jetson") PUBLIC nvv4l2 nvv4lconvert - nvbuf_utils #nvbuf_fdmap #nvddk_vic #nvddk_2d_v2 nvjpeg nvbufsurface nvbufsurftransform + + # nvbuf_utils を NvUtils に移行した際、ドキュメントには libnvbuf_utils.so を参照するように記載があったが、 + # そのような so ファイルは存在しないためリンクが失敗した + # nvbuf_fdmap を追加したらリンクが通った + # https://developer.nvidia.com/sites/default/files/akamai/embedded/nvbuf_utils_to_nvutils_migration_guide.pdf + nvbuf_fdmap #nvos ) endif(USE_JETSON_ENCODER) diff --git a/multistrap/ubuntu-20.04_armv8_jetson.conf b/multistrap/ubuntu-20.04_armv8_jetson.conf index e769636d..ef5e19a3 100644 --- a/multistrap/ubuntu-20.04_armv8_jetson.conf +++ b/multistrap/ubuntu-20.04_armv8_jetson.conf @@ -13,11 +13,11 @@ components=main universe [Jetson] packages= source=https://repo.download.nvidia.com/jetson/common -suite=r35.2 +suite=r35.4 components=main [T194] packages=nvidia-l4t-camera nvidia-l4t-jetson-multimedia-api source=https://repo.download.nvidia.com/jetson/t194 -suite=r35.2 +suite=r35.4 components=main \ No newline at end of file diff --git a/src/hwenc_jetson/jetson_buffer.cpp b/src/hwenc_jetson/jetson_buffer.cpp index a475ab1a..d1e41417 100644 --- a/src/hwenc_jetson/jetson_buffer.cpp +++ b/src/hwenc_jetson/jetson_buffer.cpp @@ -9,7 +9,8 @@ #include // Jetson Linux Multimedia API -#include +#include +#include namespace sora { @@ -62,62 +63,65 @@ rtc::scoped_refptr JetsonBuffer::ToI420() { int32_t buffer_width = ((scaled_width_ + 15) / 16) * 16; int32_t buffer_height = ((scaled_height_ + 15) / 16) * 16; - NvBufferCreateParams input_params = {0}; - input_params.payloadType = NvBufferPayload_SurfArray; - input_params.width = buffer_width; - input_params.height = buffer_height; - input_params.layout = NvBufferLayout_Pitch; - input_params.colorFormat = NvBufferColorFormat_YUV420; - input_params.nvbuf_tag = NvBufferTag_NONE; - - int 
dmabuf_fd; - if (NvBufferCreateEx(&dmabuf_fd, &input_params) == -1) { - RTC_LOG(LS_ERROR) << __FUNCTION__ << " Failed to NvBufferCreateEx"; + NvBufSurfaceAllocateParams input_params = {0}; + input_params.params.width = buffer_width; + input_params.params.height = buffer_height; + input_params.params.layout = NVBUF_LAYOUT_PITCH; + input_params.params.colorFormat = NVBUF_COLOR_FORMAT_YUV420; + input_params.params.memType = NVBUF_MEM_SURFACE_ARRAY; + input_params.memtag = NvBufSurfaceTag_NONE; + + NvBufSurface* dst_surf = 0; + + if (NvBufSurfaceAllocate( + &dst_surf, + 1, /* NvUtils では複数のバッファーを同時に初期化できるため、バッファーの数を指定する */ + &input_params) == -1) { + RTC_LOG(LS_ERROR) << __FUNCTION__ << " Failed to NvBufSurfaceAllocate"; return scaled_buffer; } + NvBufSurfaceParams params = dst_surf->surfaceList[0]; - NvBufferParams params = {0}; - if (NvBufferGetParams(fd_, ¶ms) == -1) { - RTC_LOG(LS_ERROR) << __FUNCTION__ << " Failed to NvBufferGetParams"; - return scaled_buffer; - } - - NvBufferRect src_rect, dest_rect; + NvBufSurfTransformRect src_rect, dest_rect; src_rect.top = 0; src_rect.left = 0; - src_rect.width = params.width[0]; - src_rect.height = params.height[0]; + src_rect.width = params.width; + src_rect.height = params.height; dest_rect.top = 0; dest_rect.left = 0; dest_rect.width = buffer_width; dest_rect.height = buffer_height; - NvBufferTransformParams trans_params; + NvBufSurfTransformParams trans_params; memset(&trans_params, 0, sizeof(trans_params)); - trans_params.transform_flag = NVBUFFER_TRANSFORM_FILTER; - trans_params.transform_flip = NvBufferTransform_None; - trans_params.transform_filter = NvBufferTransform_Filter_Smart; - trans_params.src_rect = src_rect; - trans_params.dst_rect = dest_rect; - - if (NvBufferTransform(fd_, dmabuf_fd, &trans_params) == -1) { - RTC_LOG(LS_ERROR) << __FUNCTION__ << " Failed to NvBufferTransform"; + trans_params.transform_flag = NVBUFSURF_TRANSFORM_FILTER; + trans_params.transform_flip = NvBufSurfTransform_None; + 
trans_params.transform_filter = NvBufSurfTransformInter_Algo3; + trans_params.src_rect = &src_rect; + trans_params.dst_rect = &dest_rect; + + NvBufSurface* src_surf = 0; + if (NvBufSurfaceFromFd(fd_, (void**)(&src_surf)) == -1) { + RTC_LOG(LS_ERROR) << __FUNCTION__ << " Failed to NvBufSurfaceFromFd"; return scaled_buffer; } - NvBufferParams dmabuf_params = {0}; - if (NvBufferGetParams(dmabuf_fd, &dmabuf_params) == -1) { - RTC_LOG(LS_ERROR) << __FUNCTION__ << " Failed to NvBufferGetParams"; + if (NvBufSurfTransform(src_surf, dst_surf, &trans_params) != + NvBufSurfTransformError_Success) { + RTC_LOG(LS_ERROR) << __FUNCTION__ << " Failed to NvBufSurfTransform"; return scaled_buffer; } int ret; void* data_addr; uint8_t* dest_addr; - for (int plane = 0; plane < dmabuf_params.num_planes; plane++) { - ret = NvBufferMemMap(dmabuf_fd, plane, NvBufferMem_Read, &data_addr); + int num_planes = dst_surf->surfaceList->planeParams.num_planes; + int index = 0; + for (int plane = 0; plane < num_planes; plane++) { + ret = NvBufSurfaceMap(dst_surf, index, plane, NVBUF_MAP_READ); if (ret == 0) { - NvBufferMemSyncForCpu(dmabuf_fd, plane, &data_addr); + NvBufSurfaceSyncForCpu(dst_surf, index, plane); + data_addr = dst_surf->surfaceList->mappedAddr.addr[plane]; int height, width; if (plane == 0) { dest_addr = scaled_buffer.get()->MutableDataY(); @@ -134,18 +138,20 @@ rtc::scoped_refptr JetsonBuffer::ToI420() { } for (int i = 0; i < height; i++) { memcpy(dest_addr + width * i, - (uint8_t*)data_addr + dmabuf_params.pitch[plane] * i, width); + (uint8_t*)data_addr + + dst_surf->surfaceList->planeParams.pitch[plane] * i, + width); } } - NvBufferMemUnMap(dmabuf_fd, plane, &data_addr); + NvBufSurfaceUnMap(dst_surf, index, plane); if (ret == -1) { RTC_LOG(LS_ERROR) << __FUNCTION__ - << " Failed to NvBufferMemMap plane=" << plane; + << " Failed to NvBufSurfaceMap plane=" << plane; return scaled_buffer; } } - NvBufferDestroy(dmabuf_fd); + NvBufSurfaceDestroy(dst_surf); return scaled_buffer; } else 
{ diff --git a/src/hwenc_jetson/jetson_jpeg_decoder_pool.cpp b/src/hwenc_jetson/jetson_jpeg_decoder_pool.cpp index 84c279aa..f0b33176 100644 --- a/src/hwenc_jetson/jetson_jpeg_decoder_pool.cpp +++ b/src/hwenc_jetson/jetson_jpeg_decoder_pool.cpp @@ -11,15 +11,19 @@ namespace sora { std::shared_ptr JetsonJpegDecoderPool::Pop() { std::shared_ptr nv_decoder; - { - std::lock_guard lock(mtx_); - if (decoder_queue_.size() == 0) { - nv_decoder.reset(NvJPEGDecoder::createJPEGDecoder("jpegdec")); - } else { - nv_decoder = std::move(decoder_queue_.front()); - decoder_queue_.pop(); - } - } + // JetPack 5.1.2 で同じフレームが送信され続ける問題が発生したため、キューを無効化した + // JetPack 5.1.1 では正常に動作していた + // momo で同様の問題に対応した際の PR: https://github.com/shiguredo/momo/pull/297/ + // { + // std::lock_guard lock(mtx_); + // if (decoder_queue_.size() == 0) { + // nv_decoder.reset(NvJPEGDecoder::createJPEGDecoder("jpegdec")); + // } else { + // nv_decoder = std::move(decoder_queue_.front()); + // decoder_queue_.pop(); + // } + // } + nv_decoder.reset(NvJPEGDecoder::createJPEGDecoder("jpegdec")); std::shared_ptr decoder( new JetsonJpegDecoder(shared_from_this(), std::move(nv_decoder))); @@ -28,7 +32,7 @@ std::shared_ptr JetsonJpegDecoderPool::Pop() { void JetsonJpegDecoderPool::Push(std::shared_ptr decoder) { std::lock_guard lock(mtx_); - decoder_queue_.push(std::move(decoder)); + // decoder_queue_.push(std::move(decoder)); } } // namespace sora diff --git a/src/hwenc_jetson/jetson_video_decoder.cpp b/src/hwenc_jetson/jetson_video_decoder.cpp index 4bb9d805..8b2b5762 100644 --- a/src/hwenc_jetson/jetson_video_decoder.cpp +++ b/src/hwenc_jetson/jetson_video_decoder.cpp @@ -22,9 +22,11 @@ #include // L4T Multimedia API -#include +#include +#include // Jetson Linux Multimedia API +#include #include #define INIT_ERROR(cond, desc) \ @@ -218,7 +220,7 @@ bool JetsonVideoDecoder::JetsonRelease() { decoder_ = nullptr; } if (dst_dma_fd_ != -1) { - NvBufferDestroy(dst_dma_fd_); + NvBufSurf::NvDestroy(dst_dma_fd_);
dst_dma_fd_ = -1; } return true; @@ -282,6 +284,7 @@ void JetsonVideoDecoder::CaptureLoop() { } NvBuffer* buffer; + while (1) { struct v4l2_buffer v4l2_buf; struct v4l2_plane planes[MAX_PLANES]; @@ -305,26 +308,23 @@ void JetsonVideoDecoder::CaptureLoop() { uint64_t pts = v4l2_buf.timestamp.tv_sec * rtc::kNumMicrosecsPerSec + v4l2_buf.timestamp.tv_usec; - NvBufferRect src_rect, dest_rect; - src_rect.top = capture_crop_->c.top; - src_rect.left = capture_crop_->c.left; - src_rect.width = capture_crop_->c.width; - src_rect.height = capture_crop_->c.height; - dest_rect.top = 0; - dest_rect.left = 0; - dest_rect.width = capture_crop_->c.width; - dest_rect.height = capture_crop_->c.height; - - NvBufferTransformParams transform_params; + NvBufSurf::NvCommonTransformParams transform_params; memset(&transform_params, 0, sizeof(transform_params)); - transform_params.transform_flag = NVBUFFER_TRANSFORM_FILTER; - transform_params.transform_flip = NvBufferTransform_None; - transform_params.transform_filter = NvBufferTransform_Filter_Smart; - transform_params.src_rect = src_rect; - transform_params.dst_rect = dest_rect; + transform_params.src_top = capture_crop_->c.top; + transform_params.src_left = capture_crop_->c.left; + transform_params.src_width = capture_crop_->c.width; + transform_params.src_height = capture_crop_->c.height; + transform_params.dst_top = 0; + transform_params.dst_left = 0; + transform_params.dst_width = capture_crop_->c.width; + transform_params.dst_height = capture_crop_->c.height; + transform_params.flag = NVBUFSURF_TRANSFORM_FILTER; + transform_params.flip = NvBufSurfTransform_None; + transform_params.filter = NvBufSurfTransformInter_Algo3; + // 何が来ても YUV420 に変換する - ret = NvBufferTransform(buffer->planes[0].fd, dst_dma_fd_, - &transform_params); + ret = NvBufSurf::NvTransform(&transform_params, buffer->planes[0].fd, + dst_dma_fd_); if (ret == -1) { RTC_LOG(LS_ERROR) << __FUNCTION__ << " Transform failed"; break; @@ -341,9 +341,6 @@ void 
JetsonVideoDecoder::CaptureLoop() { break; } - NvBufferParams parm; - ret = NvBufferGetParams(dst_dma_fd_, &parm); - void* src_data; uint8_t* dst_data; int dst_stride; @@ -360,13 +357,23 @@ void JetsonVideoDecoder::CaptureLoop() { } else { break; } - ret = NvBufferMemMap(dst_dma_fd_, i, NvBufferMem_Read, &src_data); - NvBufferMemSyncForCpu(dst_dma_fd_, i, &src_data); - for (uint32_t j = 0; j < parm.height[i]; j++) { - memcpy(dst_data + j * dst_stride, (char*)src_data + j * parm.pitch[i], - parm.width[i]); + + NvBufSurface* dst_surf = 0; + if (NvBufSurfaceFromFd(dst_dma_fd_, (void**)(&dst_surf)) == -1) { + RTC_LOG(LS_ERROR) << __FUNCTION__ << "Failed to NvBufSurfaceFromFd"; + break; + } + + ret = NvBufSurfaceMap(dst_surf, 0, i, NVBUF_MAP_READ); + NvBufSurfaceSyncForCpu(dst_surf, 0, i); + src_data = dst_surf->surfaceList[0].mappedAddr.addr[i]; + + NvBufSurfacePlaneParams params = dst_surf->surfaceList[0].planeParams; + for (uint32_t j = 0; j < params.height[i]; j++) { + memcpy(dst_data + j * dst_stride, + (char*)src_data + j * params.pitch[i], params.width[i]); } - NvBufferMemUnMap(dst_dma_fd_, i, &src_data); + NvBufSurfaceUnMap(dst_surf, 0, i); } webrtc::VideoFrame decoded_image = @@ -407,20 +414,20 @@ int JetsonVideoDecoder::SetCapture() { << "x" << format.fmt.pix_mp.height; if (dst_dma_fd_ != -1) { - NvBufferDestroy(dst_dma_fd_); + NvBufSurf::NvDestroy(dst_dma_fd_); dst_dma_fd_ = -1; } - NvBufferCreateParams input_params = {0}; - input_params.payloadType = NvBufferPayload_SurfArray; - input_params.width = capture_crop_->c.width; - input_params.height = capture_crop_->c.height; - input_params.layout = NvBufferLayout_Pitch; - input_params.colorFormat = NvBufferColorFormat_YUV420; - input_params.nvbuf_tag = NvBufferTag_VIDEO_DEC; + NvBufSurf::NvCommonAllocateParams cParams; + cParams.width = capture_crop_->c.width; + cParams.height = capture_crop_->c.height; + cParams.layout = NVBUF_LAYOUT_PITCH; + cParams.colorFormat = NVBUF_COLOR_FORMAT_YUV420; + cParams.memtag = 
NvBufSurfaceTag_VIDEO_DEC; + cParams.memType = NVBUF_MEM_SURFACE_ARRAY; - ret = NvBufferCreateEx(&dst_dma_fd_, &input_params); - INIT_ERROR(ret == -1, "create dmabuf failed"); + ret = NvBufSurf::NvAllocate(&cParams, 1, &dst_dma_fd_); + INIT_ERROR(ret == -1, "failed to NvBufSurfaceAllocate"); decoder_->capture_plane.deinitPlane(); diff --git a/src/hwenc_jetson/jetson_video_encoder.cpp b/src/hwenc_jetson/jetson_video_encoder.cpp index bbf8c582..2118d746 100644 --- a/src/hwenc_jetson/jetson_video_encoder.cpp +++ b/src/hwenc_jetson/jetson_video_encoder.cpp @@ -28,7 +28,7 @@ // L4T Multimedia API #include #include -#include +#include #include "sora/hwenc_jetson/jetson_buffer.h" @@ -748,11 +748,20 @@ int32_t JetsonVideoEncoder::Encode( input_frame.timestamp_us() % rtc::kNumMicrosecsPerSec; for (int i = 0; i < MAX_PLANES; i++) { - if (NvBufferMemSyncForDevice(buffer->planes[i].fd, i, - (void**)&buffer->planes[i].data) < 0) { - RTC_LOG(LS_ERROR) << "Failed to NvBufferMemSyncForDevice"; + NvBufSurface* surf = 0; + if (NvBufSurfaceFromFd(buffer->planes[i].fd, (void**)(&surf)) == -1) { + RTC_LOG(LS_ERROR) << __FUNCTION__ << "Failed to NvBufSurfaceFromFd"; return WEBRTC_VIDEO_CODEC_ERROR; } + + if (NvBufSurfaceSyncForDevice(surf, 0, i) == -1) { + RTC_LOG(LS_ERROR) << "Failed to NvBufSurfaceSyncForDevice"; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + // ここで NvBufSurfaceDestroy が必要かなと思ったが、以下のサンプル・コードを確認したところ不要そうだった + // 参照: jetson_multimedia_api/samples/01_video_encode/video_encode_main.cpp + // NvBufSurfaceDestroy(surf); } if (encoder_->output_plane.qBuffer(v4l2_buf, nullptr) < 0) {