
Back in 2015 we released the first version of our Android RTMP camera/screen live-push module. After many iterations it has become feature-rich and high-performance, and in recent years it has arguably been among the best-regarded RTMP modules in the industry in terms of latency (millisecond-level delay, roughly 100-odd milliseconds in low-latency mode). Since we focus on audio/video live-streaming solutions for traditional industries, the following sections walk through how the Android RTMP camera capture and push module is used.
Add the necessary permissions in the AndroidManifest.xml file:
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />以Camera2采集为例,大概的实现如下:
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import java.nio.ByteBuffer;
import java.util.Arrays;

@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
public class Camera2Activity extends AppCompatActivity {
    private TextureView textureView;
    private CameraDevice cameraDevice;
    private CameraCaptureSession cameraCaptureSession;
    private ImageReader imageReader;
    private HandlerThread backgroundThread;
    private Handler backgroundHandler;
    private final String cameraId = "0";
    private Size previewSize;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera2);
        textureView = findViewById(R.id.texture_view);
        textureView.setSurfaceTextureListener(surfaceTextureListener);
    }

    private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
            openCamera();
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
            return false;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
        }
    };

    private void openCamera() {
        CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
        try {
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                return;
            }
            CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            // For simplicity this just takes the first supported size; a real app should pick
            // one that matches the target push resolution.
            previewSize = map.getOutputSizes(SurfaceTexture.class)[0];
            cameraManager.openCamera(cameraId, stateCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice camera) {
            cameraDevice = camera;
            createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice camera) {
            camera.close();
            cameraDevice = null;
        }

        @Override
        public void onError(@NonNull CameraDevice camera, int error) {
            camera.close();
            cameraDevice = null;
        }
    };

    @SuppressLint("MissingPermission")
    private void createCameraPreviewSession() {
        try {
            SurfaceTexture surfaceTexture = textureView.getSurfaceTexture();
            surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
            Surface previewSurface = new Surface(surfaceTexture);

            // The ImageReader delivers the YUV_420_888 frames; its surface must be added to the
            // capture request and to the session, otherwise onImageAvailable() is never called.
            imageReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
            imageReader.setOnImageAvailableListener(imageAvailableListener, backgroundHandler);

            CaptureRequest.Builder captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            captureRequestBuilder.addTarget(previewSurface);
            captureRequestBuilder.addTarget(imageReader.getSurface());

            cameraDevice.createCaptureSession(Arrays.asList(previewSurface, imageReader.getSurface()), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    if (cameraDevice == null) {
                        return;
                    }
                    cameraCaptureSession = session;
                    try {
                        CaptureRequest captureRequest = captureRequestBuilder.build();
                        cameraCaptureSession.setRepeatingRequest(captureRequest, null, backgroundHandler);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                }
            }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private final ImageReader.OnImageAvailableListener imageAvailableListener = new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = reader.acquireNextImage();
            if (image != null) {
                ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                byte[] data = new byte[buffer.remaining()];
                buffer.get(data);
                // The captured frame can be processed here, or pushed to RTMP directly.
                image.close();
            }
        }
    };

    @Override
    protected void onResume() {
        super.onResume();
        startBackgroundThread();
        if (textureView.isAvailable()) {
            openCamera();
        } else {
            textureView.setSurfaceTextureListener(surfaceTextureListener);
        }
    }

    @Override
    protected void onPause() {
        stopCameraPreviewSession();
        closeCamera();
        stopBackgroundThread();
        super.onPause();
    }

    private void stopCameraPreviewSession() {
        if (cameraCaptureSession != null) {
            try {
                cameraCaptureSession.stopRepeating();
                cameraCaptureSession.close();
                cameraCaptureSession = null;
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }
    }

    private void closeCamera() {
        if (cameraDevice != null) {
            cameraDevice.close();
            cameraDevice = null;
        }
        if (imageReader != null) {
            imageReader.close();
            imageReader = null;
        }
    }

    private void startBackgroundThread() {
        backgroundThread = new HandlerThread("CameraBackground");
        backgroundThread.start();
        backgroundHandler = new Handler(backgroundThread.getLooper());
    }

    private void stopBackgroundThread() {
        backgroundThread.quitSafely();
        try {
            backgroundThread.join();
            backgroundThread = null;
            backgroundHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
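In a real push scenario, the ImageReader callback would not copy just the Y plane; it would hand the whole Image (all three YUV planes) to the delivery routine of the push module, such as the onCameraImageData() shown later in this article. A rough sketch of that wiring (the listener field name is a placeholder):

// Sketch only: forward each frame to the delivery method shown later (onCameraImageData),
// then release the Image. acquireLatestImage() drops stale frames if the consumer is slow.
private final ImageReader.OnImageAvailableListener pushImageListener = reader -> {
    Image image = reader.acquireLatestImage();
    if (image == null)
        return;
    try {
        onCameraImageData(image); // hands the Y/U/V plane buffers to the RTMP push module
    } finally {
        image.close();            // always return the buffer to the ImageReader
    }
};

For CameraX, the capture code looks roughly like this: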
import android.annotation.SuppressLint;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.os.Bundle;
import android.util.Log;
import android.util.Size;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.LifecycleOwner;
import com.google.common.util.concurrent.ListenableFuture;
import java.nio.ByteBuffer;
import java.util.concurrent.ExecutionException;

public class CameraXActivity extends AppCompatActivity {
    // Preview.setSurfaceProvider() expects a PreviewView (androidx.camera.view), not a raw TextureView.
    private PreviewView previewView;
    private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camerax);
        // The layout id is kept from the original; the view it refers to must be an androidx.camera.view.PreviewView.
        previewView = findViewById(R.id.texture_view);
        cameraProviderFuture = ProcessCameraProvider.getInstance(this);
        cameraProviderFuture.addListener(() -> {
            try {
                ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
                startCameraX(cameraProvider);
            } catch (ExecutionException | InterruptedException e) {
                Log.e("CameraXActivity", "Error starting camera", e);
            }
        }, ContextCompat.getMainExecutor(this));
    }

    @SuppressLint("UnsafeExperimentalUsageError")
    private void startCameraX(ProcessCameraProvider cameraProvider) {
        if (ActivityCompat.checkSelfPermission(this, android.Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            return;
        }
        Preview preview = new Preview.Builder()
                .build();
        CameraSelector cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA;
        ImageAnalysis imageAnalysis = new ImageAnalysis.Builder()
                .setTargetResolution(new Size(640, 480))
                .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
                .build();
        imageAnalysis.setAnalyzer(ContextCompat.getMainExecutor(this), new ImageAnalysis.Analyzer() {
            @Override
            public void analyze(@NonNull ImageProxy image) {
                if (image.getFormat() == ImageFormat.YUV_420_888) {
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    byte[] data = new byte[buffer.remaining()];
                    buffer.get(data);
                    // The captured frame can be processed here, or pushed to RTMP directly.
                }
                // Always close the ImageProxy, otherwise the analysis pipeline stalls.
                image.close();
            }
        });
        Camera camera = cameraProvider.bindToLifecycle((LifecycleOwner) this, cameraSelector, preview, imageAnalysis);
        preview.setSurfaceProvider(previewView.getSurfaceProvider());
    }
}
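The analyzer above likewise copies only the Y plane. To reuse the same delivery path as Camera2, the underlying android.media.Image can be obtained from the ImageProxy via getImage(); this is an experimental CameraX API, hence the opt-in annotation (and it requires an import of android.media.Image). A rough sketch:

// Sketch only: obtain the underlying Image so the same onCameraImageData() delivery
// routine can be reused; closing the ImageProxy also releases the Image.
imageAnalysis.setAnalyzer(ContextCompat.getMainExecutor(this), new ImageAnalysis.Analyzer() {
    @Override
    @androidx.camera.core.ExperimentalGetImage // opt-in for the experimental getImage() API
    public void analyze(@NonNull ImageProxy imageProxy) {
        Image image = imageProxy.getImage(); // may be null
        if (image != null && image.getFormat() == ImageFormat.YUV_420_888)
            onCameraImageData(image);        // same delivery routine as the Camera2 path
        imageProxy.close();
    }
});

Once a frame is available, the data delivery into the 大牛直播SDK RTMP push module is implemented as follows: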

@Override
public void onCameraImageData(Image image) {
    Image.Plane[] planes = image.getPlanes();
    int w = image.getWidth(), h = image.getHeight();
    int y_offset = 0, u_offset = 0, v_offset = 0;
    int scale_w = 0, scale_h = 0, scale_filter_mode = 0;
    scale_filter_mode = 3;
    int rotation_degree = cameraImageRotationDegree_;
    if (rotation_degree < 0) {
        Log.i(TAG, "onCameraImageData rotation_degree < 0, may need to set orientation_ to 0, 90, 180 or 270");
        return;
    }
    for (LibPublisherWrapper i : publisher_array_)
        i.PostLayerImageYUV420888ByteBuffer(0, 0, 0,
                planes[0].getBuffer(), y_offset, planes[0].getRowStride(),
                planes[1].getBuffer(), u_offset, planes[1].getRowStride(),
                planes[2].getBuffer(), v_offset, planes[2].getRowStride(), planes[1].getPixelStride(),
                w, h, 0, 0,
                scale_w, scale_h, scale_filter_mode, rotation_degree);
}
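onCameraImageData() returns early when cameraImageRotationDegree_ is negative; how that field is computed is not shown in this article. A common approach is the orientation formula from the Android CaptureRequest.JPEG_ORIENTATION documentation, sketched below (the method name is a placeholder, and deviceOrientation is the value reported by an OrientationEventListener):

// Sketch: derive a 0/90/180/270 rotation from the sensor orientation and the current
// device orientation, following the formula in the CaptureRequest.JPEG_ORIENTATION docs.
private int computeImageRotationDegree(CameraCharacteristics characteristics, int deviceOrientation) {
    if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN)
        return 0;
    Integer sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    if (sensorOrientation == null)
        return 0;
    // Round the device orientation to the nearest multiple of 90 degrees.
    deviceOrientation = (deviceOrientation + 45) / 90 * 90;
    // Front-facing cameras rotate in the opposite direction.
    Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
    if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT)
        deviceOrientation = -deviceOrientation;
    return (sensorOrientation + deviceOrientation + 360) % 360;
}

The result (0, 90, 180 or 270) can then be stored in cameraImageRotationDegree_ so the push module rotates frames upright before encoding.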
The PostLayerImageYUV420888ByteBuffer() wrapper in LibPublisherWrapper.java is implemented as follows:
/*
 * LibPublisherWrapper.java
 * Created by daniusdk.com
 * WeChat: xinsheng120
 */
public boolean PostLayerImageYUV420888ByteBuffer(int index, int left, int top,
                                                 ByteBuffer y_plane, int y_offset, int y_row_stride,
                                                 ByteBuffer u_plane, int u_offset, int u_row_stride,
                                                 ByteBuffer v_plane, int v_offset, int v_row_stride, int uv_pixel_stride,
                                                 int width, int height, int is_vertical_flip, int is_horizontal_flip,
                                                 int scale_width, int scale_height, int scale_filter_mode,
                                                 int rotation_degree) {
    if (!check_native_handle())
        return false;
    if (!read_lock_.tryLock())
        return false;
    try {
        if (!check_native_handle())
            return false;
        return OK == lib_publisher_.PostLayerImageYUV420888ByteBuffer(get(), index, left, top,
                y_plane, y_offset, y_row_stride,
                u_plane, u_offset, u_row_stride,
                v_plane, v_offset, v_row_stride, uv_pixel_stride,
                width, height, is_vertical_flip, is_horizontal_flip,
                scale_width, scale_height, scale_filter_mode, rotation_degree);
    } catch (Exception e) {
        Log.e(TAG, "PostLayerImageYUV420888ByteBuffer Exception:", e);
        return false;
    } finally {
        read_lock_.unlock();
    }
}

Taking the Camera2 capture integration as an example, Camera2RtmpPusherActivity.java configures the RTMP push-side parameters as follows:
/*
 * Camera2RtmpPusherActivity.java
 * Created by daniusdk.com
 * WeChat: xinsheng120
 */
private boolean initialize_publisher(SmartPublisherJniV2 lib_publisher, long handle, int width, int height, int fps, int gop) {
    if (null == lib_publisher) {
        Log.e(TAG, "initialize_publisher lib_publisher is null");
        return false;
    }
    if (0 == handle) {
        Log.e(TAG, "initialize_publisher handle is 0");
        return false;
    }
    if (videoEncodeType == 1) {
        int kbps = LibPublisherWrapper.estimate_video_hardware_kbps(width, height, fps, true);
        Log.i(TAG, "h264HWKbps: " + kbps);
        int isSupportH264HWEncoder = lib_publisher.SetSmartPublisherVideoHWEncoder(handle, kbps);
        if (isSupportH264HWEncoder == 0) {
            lib_publisher.SetNativeMediaNDK(handle, 0);
            lib_publisher.SetVideoHWEncoderBitrateMode(handle, 1); // 0: CQ, 1: VBR, 2: CBR
            lib_publisher.SetVideoHWEncoderQuality(handle, 39);
            lib_publisher.SetAVCHWEncoderProfile(handle, 0x08); // 0x01: Baseline, 0x02: Main, 0x08: High
            lib_publisher.SetAVCHWEncoderLevel(handle, 0x1000); // Level 4.1, sufficient in most cases
            Log.i(TAG, "Great, it supports h.264 hardware encoder!");
        }
    } else if (videoEncodeType == 2) {
        int kbps = LibPublisherWrapper.estimate_video_hardware_kbps(width, height, fps, false);
        Log.i(TAG, "hevcHWKbps: " + kbps);
        int isSupportHevcHWEncoder = lib_publisher.SetSmartPublisherVideoHevcHWEncoder(handle, kbps);
        if (isSupportHevcHWEncoder == 0) {
            lib_publisher.SetNativeMediaNDK(handle, 0);
            lib_publisher.SetVideoHWEncoderBitrateMode(handle, 1); // 0: CQ, 1: VBR, 2: CBR
            lib_publisher.SetVideoHWEncoderQuality(handle, 39);
            Log.i(TAG, "Great, it supports hevc hardware encoder!");
        }
    }

    // H.264 software encoder: VBR settings
    boolean is_sw_vbr_mode = true;
    if (is_sw_vbr_mode) {
        int is_enable_vbr = 1;
        int video_quality = LibPublisherWrapper.estimate_video_software_quality(width, height, true);
        int vbr_max_kbps = LibPublisherWrapper.estimate_video_vbr_max_kbps(width, height, fps);
        lib_publisher.SmartPublisherSetSwVBRMode(handle, is_enable_vbr, video_quality, vbr_max_kbps);
    }

    if (is_pcma_) {
        lib_publisher.SmartPublisherSetAudioCodecType(handle, 3);
    } else {
        lib_publisher.SmartPublisherSetAudioCodecType(handle, 1);
    }

    lib_publisher.SetSmartPublisherEventCallbackV2(handle, new EventHandlerPublisherV2().set(handler_, record_executor_));
    lib_publisher.SmartPublisherSetSWVideoEncoderProfile(handle, 3);
    lib_publisher.SmartPublisherSetSWVideoEncoderSpeed(handle, 2);
    lib_publisher.SmartPublisherSetGopInterval(handle, gop);
    lib_publisher.SmartPublisherSetFPS(handle, fps);
    // lib_publisher.SmartPublisherSetSWVideoBitRate(handle, 600, 1200);

    boolean is_noise_suppression = true;
    lib_publisher.SmartPublisherSetNoiseSuppression(handle, is_noise_suppression ? 1 : 0);
    boolean is_agc = false;
    lib_publisher.SmartPublisherSetAGC(handle, is_agc ? 1 : 0);
    int echo_cancel_delay = 0;
    lib_publisher.SmartPublisherSetEchoCancellation(handle, 1, echo_cancel_delay);
    return true;
}

private void InitAndSetConfig() {
    if (null == libPublisher)
        return;
    if (!stream_publisher_.empty())
        return;
    Log.i(TAG, "InitAndSetConfig video width: " + video_width_ + ", height: " + video_height_ + ", imageRotationDegree: " + cameraImageRotationDegree_);
    int audio_opt = 1;
    long handle = libPublisher.SmartPublisherOpen(context_, audio_opt, 3, video_width_, video_height_);
    if (0 == handle) {
        Log.e(TAG, "sdk open failed!");
        return;
    }
    Log.i(TAG, "publisherHandle=" + handle);
    int fps = 25;
    int gop = fps * 3;
    initialize_publisher(libPublisher, handle, video_width_, video_height_, fps, gop);
    stream_publisher_.set(libPublisher, handle);
}

As the code above shows, Camera2 is now our primary choice for camera capture. So why Camera2, and where exactly do its capture advantages lie?