Android - Rotate video frames before sending to Wowza Streaming Engine using WebRTC

Tags: android, java-native-interface, webrtc, wowza

I want to stream video from the Android camera to Wowza Streaming Engine (WSE) using WebRTC. Everything works fine when the device is in landscape mode. I then tried streaming with the device in portrait mode.

The first thing I noticed in the WSE player was that the video stream was rotated 90 degrees counterclockwise. I found that WebRTC does not rotate the video frames coming from the onPreviewFrame API before sending them to WSE, and unfortunately WSE, at least so far, provides no mechanism for rotating video frames on its side.

So I checked the WebRTC Android native source code and modified it to rotate each video frame before it is sent to WSE. Now I can see the video stream in portrait mode in the WSE player.

But there is one problem: sometimes the video stream looks strange. Please see the images below.

I kept the camera in a fixed position. The WSE player shows the first image at first, but sometimes shows the second one instead.

Here is the file I changed in the WebRTC source code:
~/webrtc/src/sdk/android/src/jni/androidvideotracksource.cc

void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                        int length,
                                                        int width,
                                                        int height,
                                                        VideoRotation rotation,
                                                        int64_t timestamp_ns) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());

  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
  int64_t translated_camera_time_us =
      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;

  if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
                  &adapted_height, &crop_width, &crop_height, &crop_x,
                  &crop_y)) {
    return;
  }

  const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
  const uint8_t* uv_plane = y_plane + width * height;
  const int uv_width = (width + 1) / 2;

  RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));

  // Can only crop at even pixels.
  crop_x &= ~1;
  crop_y &= ~1;
  // Crop just by modifying pointers.
  y_plane += width * crop_y + crop_x;
  uv_plane += uv_width * crop_y + crop_x;

  rtc::scoped_refptr<I420Buffer> buffer =
      buffer_pool_.CreateBuffer(adapted_width, adapted_height);

  nv12toi420_scaler_.NV12ToI420Scale(
      y_plane, width, uv_plane, uv_width * 2, crop_width, crop_height,
      buffer->MutableDataY(), buffer->StrideY(),
      // Swap U and V, since we have NV21, not NV12.
      buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
      buffer->StrideU(), buffer->width(), buffer->height());

  // TODO: Rotate I420 frame 90 degrees clockwise.
  rtc::scoped_refptr<I420Buffer> rotated_buffer =
      I420Buffer::Rotate(*buffer, kVideoRotation_90);

  OnFrame(VideoFrame(rotated_buffer, rotation, translated_camera_time_us));
}
I added these lines to rotate the I420 frame 90 degrees clockwise:

// TODO: Rotate I420 frame 90 degrees clockwise.
  rtc::scoped_refptr<I420Buffer> rotated_buffer =
      I420Buffer::Rotate(*buffer, kVideoRotation_90);
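
Side note: I420Buffer::Rotate swaps the buffer's width and height for 90- and 270-degree rotations, so anything downstream that was sized from the original width and height no longer matches the rotated frame. A minimal standalone sketch of this behavior (assuming the usual WebRTC headers; the helper name RotationSwapsDimensions is just for illustration):

#include "api/video/i420_buffer.h"
#include "api/video/video_rotation.h"
#include "rtc_base/checks.h"

// Sketch: rotating a 640x480 I420 buffer by 90 degrees yields a 480x640 buffer.
void RotationSwapsDimensions() {
  rtc::scoped_refptr<webrtc::I420Buffer> src =
      webrtc::I420Buffer::Create(640, 480);
  webrtc::I420Buffer::SetBlack(src.get());  // fill with black for the example

  rtc::scoped_refptr<webrtc::I420Buffer> rotated =
      webrtc::I420Buffer::Rotate(*src, webrtc::kVideoRotation_90);

  RTC_DCHECK_EQ(rotated->width(), 480);   // width and height are swapped
  RTC_DCHECK_EQ(rotated->height(), 640);
}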

I would appreciate any help.

I finally figured out a solution to this problem. Here are my steps:

Step 1: Make sure the streaming activity is locked to portrait mode (for example via android:screenOrientation="portrait" on the activity in AndroidManifest.xml).

Step 2: Change this method in the following file of the WebRTC Android native source code:
~/webrtc/src/sdk/android/src/jni/androidvideotracksource.cc

Original version:

void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                        int length,
                                                        int width,
                                                        int height,
                                                        VideoRotation rotation,
                                                        int64_t timestamp_ns) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());

  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
  int64_t translated_camera_time_us =
      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;

  if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
                  &adapted_height, &crop_width, &crop_height, &crop_x,
                  &crop_y)) {
    return;
  }

  const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
  const uint8_t* uv_plane = y_plane + width * height;
  const int uv_width = (width + 1) / 2;

  RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));

  // Can only crop at even pixels.
  crop_x &= ~1;
  crop_y &= ~1;
  // Crop just by modifying pointers.
  y_plane += width * crop_y + crop_x;
  uv_plane += uv_width * crop_y + crop_x;

  rtc::scoped_refptr<I420Buffer> buffer =
      buffer_pool_.CreateBuffer(adapted_width, adapted_height);

  nv12toi420_scaler_.NV12ToI420Scale(
      y_plane, width, uv_plane, uv_width * 2, crop_width, crop_height,
      buffer->MutableDataY(), buffer->StrideY(),
      // Swap U and V, since we have NV21, not NV12.
      buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
      buffer->StrideU(), buffer->width(), buffer->height());

  OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));
}

Modified version:

void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                        int length,
                                                        int width,
                                                        int height,
                                                        VideoRotation rotation,
                                                        int64_t timestamp_ns) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());

  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
  int64_t translated_camera_time_us =
      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;

  if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
                  &adapted_height, &crop_width, &crop_height, &crop_x,
                  &crop_y)) {
    return;
  }

  const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
  const uint8_t* uv_plane = y_plane + width * height;
  const int uv_width = (width + 1) / 2;

  RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));

  // Can only crop at even pixels.
  crop_x &= ~1;
  crop_y &= ~1;
  // Crop just by modifying pointers.
  y_plane += width * crop_y + crop_x;
  uv_plane += uv_width * crop_y + crop_x;

  rtc::scoped_refptr<I420Buffer> buffer =
      buffer_pool_.CreateBuffer(adapted_width, adapted_height);

  nv12toi420_scaler_.NV12ToI420Scale(
      y_plane, width, uv_plane, uv_width * 2, crop_width, crop_height,
      buffer->MutableDataY(), buffer->StrideY(),
      // Swap U and V, since we have NV21, not NV12.
      buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
      buffer->StrideU(), buffer->width(), buffer->height());

  // The original OnFrame call is commented out and replaced by the
  // custom code below.
  // OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));

  // Custom code to rotate the video frame before passing it to the
  // next layers of WebRTC.

  // Rotate the I420 frame by `rotation` degrees.
  // The value of `rotation` is 90 or 270, depending on camera orientation.
  rtc::scoped_refptr<I420Buffer> rotated_buffer =
      I420Buffer::Rotate(*buffer, rotation);

  // Make sure the I420 frame has valid dimensions in portrait mode.
  rtc::scoped_refptr<I420Buffer> final_buffer =
      buffer_pool_.CreateBuffer(height, width);
  final_buffer->ScaleFrom(*rotated_buffer);

  // After rotating the I420 frame, set the rotation to 0 so that the
  // next layers do not rotate the frame again.
  rotation = kVideoRotation_0;

  // Pass processed frame to the next layers.
  OnFrame(VideoFrame(final_buffer, rotation, translated_camera_time_us));
}
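
A note on the ScaleFrom step above: after Rotate, the buffer measures adapted_height x adapted_width, while final_buffer is allocated with the captured dimensions swapped (height x width); ScaleFrom rescales the rotated image into it, so the output size stays stable even when AdaptFrame picks different adapted dimensions. Here is a minimal sketch of the same pattern in isolation (assuming the standard WebRTC I420Buffer API; the helper name RotateAndNormalize is just for illustration):

#include "api/video/i420_buffer.h"
#include "api/video/video_rotation.h"

// Sketch: rotate a frame, then scale it into a buffer whose size is the
// captured width/height swapped, mirroring the modified method above.
rtc::scoped_refptr<webrtc::I420Buffer> RotateAndNormalize(
    const webrtc::I420BufferInterface& src,
    webrtc::VideoRotation rotation,
    int capture_width,
    int capture_height) {
  rtc::scoped_refptr<webrtc::I420Buffer> rotated =
      webrtc::I420Buffer::Rotate(src, rotation);

  // Allocate the portrait-sized target (capture dimensions swapped).
  rtc::scoped_refptr<webrtc::I420Buffer> target =
      webrtc::I420Buffer::Create(capture_height, capture_width);
  target->ScaleFrom(*rotated);
  return target;
}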