Camera
Some robots have a single camera and some have dual cameras; camera usage differs across product lines, as detailed below.
豹小秘1
Single camera; it can be opened through the standard Android API.
豹小秘2 and 豹小秘mini
Dual cameras. The front camera can be opened through the standard Android API, or its preview data can be fetched through the shared stream (opening the front camera through the standard Android API temporarily disables the robot's vision algorithms; once the camera is released, vision capability recovers after a short while). The rear camera can only be opened through the standard Android API.
招财豹 and 招财豹Pro
Single camera. The front camera can be opened through the standard Android API, or its preview data can be fetched through the shared stream (opening the front camera through the standard Android API temporarily disables the robot's vision algorithms; once the camera is released, vision capability recovers after a short while).
Note
When the camera is opened through the Android API, the app may crash the first time it is granted camera permission; after that there is no restriction. If the app crashes during camera authorization, simply relaunch it; the camera will work normally from then on.
Note for landscape robots: after the camera is started in the standard Android way, the image is rotated 90°. The robot has no gyroscope, so Android assumes a portrait camera by default, while the robot is actually a landscape device. The fix is simple: use the Android API (or the API of whichever video SDK you use) to rotate the camera image by -90° (i.e. 270°).
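With the Camera1 API, for example, the correction is a single call. A minimal sketch, assuming camera ID 0 (enumerate the IDs on your own device):
// Compensate for the default portrait orientation on a landscape robot.
android.hardware.Camera camera = android.hardware.Camera.open(0); // ID 0 is an assumption
camera.setDisplayOrientation(270); // 270° is equivalent to -90°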
The following demos show the use of the Android camera API and are for reference only; if they do not meet your needs, search online for a solution that fits your requirements.
Camera1 Demo
Camera2 Demo
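For reference, here is a minimal Camera2 preview sketch. It assumes camera permission has already been granted; the camera ID "0" and the source of previewSurface (e.g. a TextureView) are placeholders:
import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.view.Surface;
import java.util.Collections;
/**
 * Minimal Camera2 preview sketch. Camera ID "0" and the preview Surface are assumptions;
 * android.permission.CAMERA must be granted before calling startPreview().
 */
public class Camera2PreviewSketch {
    public static void startPreview(Context context, final Surface previewSurface)
            throws CameraAccessException {
        CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        manager.openCamera("0", new CameraDevice.StateCallback() {
            @Override
            public void onOpened(CameraDevice camera) {
                try {
                    final CaptureRequest.Builder builder =
                            camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                    builder.addTarget(previewSurface);
                    camera.createCaptureSession(Collections.singletonList(previewSurface),
                            new CameraCaptureSession.StateCallback() {
                                @Override
                                public void onConfigured(CameraCaptureSession session) {
                                    try {
                                        // A repeating request keeps preview frames flowing.
                                        session.setRepeatingRequest(builder.build(), null, null);
                                    } catch (CameraAccessException e) {
                                        e.printStackTrace();
                                    }
                                }
                                @Override
                                public void onConfigureFailed(CameraCaptureSession session) {
                                }
                            }, null);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }
            @Override
            public void onDisconnected(CameraDevice camera) { camera.close(); }
            @Override
            public void onError(CameraDevice camera, int error) { camera.close(); }
        }, null);
    }
}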
Camera Data Stream Sharing
Because the robot's vision capability (VisionSDK) needs the camera, face detection and face recognition become unavailable if the camera is occupied by a third-party app. To use the camera without affecting the robot's vision features, obtain the camera data from VisionSDK through a shared data stream. The camera data is fetched through SurfaceShareApi, which has three key methods:
- startRequest(): starts the shared data stream
- onImageAvailable(ImageReader reader): a callback through which all image data in the shared stream is delivered
- stopPushStream(): stops the shared data stream
Fetching image data through the shared stream has a fairly high memory and CPU cost, so always close it when it is not in use to avoid OOM. The images obtained from the shared data stream are in YUV format and must be converted to a bitmap before they can be rendered to a SurfaceView.
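A minimal sketch of that conversion, assuming the frame has already been repacked as NV21 bytes (the class name here is illustrative):
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import java.io.ByteArrayOutputStream;
/** Minimal sketch: convert NV21 bytes (e.g. 640x480 from the shared stream) to a Bitmap. */
public class YuvToBitmap {
    public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Compress the frame region to JPEG, then decode it back into a Bitmap.
        yuvImage.compressToJpeg(new Rect(0, 0, width, height), 90, out);
        byte[] jpeg = out.toByteArray();
        return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
    }
}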
The complete shared-stream implementation is shown in the following code:
package com.ainirobot.videocall.control.sufaceshareengine;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.support.annotation.NonNull;
import android.util.Log;
import android.view.Surface;
import com.ainirobot.coreservice.client.surfaceshare.SurfaceShareApi;
import com.ainirobot.coreservice.client.surfaceshare.SurfaceShareBean;
import com.ainirobot.coreservice.client.surfaceshare.SurfaceShareError;
import com.ainirobot.coreservice.client.surfaceshare.SurfaceShareListener;
import com.ainirobot.videocall.tools.ImageUtils;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
/**
* Obtains video data through the Vision SDK
*/
public class SurfaceShareDataEngine {
private static final String TAG = "SurfaceShareDataEngine";
public static final int IMAGE_DATA_FOR_LIVEME = 0;
public static final int IMAGE_DATA_FOR_LOCAL_PREVIEW = 1;
public static final int VISION_IMAGE_WIDTH = 640;
public static final int VISION_IMAGE_HEIGHT = 480;
private static final int MAX_CACHE_IMAGES = 4; // maxImages must match VisionSdk
private MessageCallBack mCallBack;
private ImageReader mImageReader;
private Handler mHandler;
private HandlerThread mBackgroundThread;
private SurfaceShareBean mSurfaceShareBean;
private int mCurrentError = 0;
private long log_interval = 0;
private MyHandler mImageHandler;
private HandlerThread mImageHandleThread;
private Surface mRemoteSurface;
private static class InstanceHolder {
public static final SurfaceShareDataEngine instance = new SurfaceShareDataEngine();
}
public static SurfaceShareDataEngine getInstance() {
return InstanceHolder.instance;
}
private SurfaceShareDataEngine() {}
public void setMessageCallBack(MessageCallBack callBack){
Log.i(TAG, "set message callBack: " + callBack);
this.mCallBack = callBack;
}
private boolean initSurfaceResource() {
Log.d(TAG, "initSurfaceResource ..");
startBackgroundThread();
boolean init = initImageSurface();
if (!init) {
Log.e(TAG, "Remote Surface instance has been used");
return false;
}
startHandleImageThread();
return true;
}
private void startHandleImageThread() {
mImageHandleThread = new HandlerThread("ImageHandleThread");
mImageHandleThread.start();
mImageHandler = new MyHandler(mImageHandleThread.getLooper());
}
public void startRequest() {
initSurfaceResource();
if (mSurfaceShareBean == null) {
mSurfaceShareBean = new SurfaceShareBean();
mSurfaceShareBean.setName("VideoCall");
}
mRemoteSurface = mImageReader.getSurface();
Log.d(TAG, "startRequest getSurface:" + mRemoteSurface);
SurfaceShareApi.getInstance().requestImageFrame(mRemoteSurface, mSurfaceShareBean, new SurfaceShareListener() {
@Override
public void onError(int error, String message) {
super.onError(error, message);
Log.d(TAG, "requestImageFrame onError error :" + error);
mCurrentError = error;
if (mCallBack != null) {
mCallBack.onError(error);
}
}
@Override
public void onStatusUpdate(int status, String message) {
super.onStatusUpdate(status, message);
Log.d(TAG, "requestImageFrame onStatusUpdate status:" + status);
}
});
}
private boolean initImageSurface() {
if (mImageReader != null) {
Log.d(TAG, "initImageSurface has an ImageReader instance already!");
return false;
}
mImageReader = ImageReader.newInstance(VISION_IMAGE_WIDTH, VISION_IMAGE_HEIGHT, ImageFormat.YUV_420_888, MAX_CACHE_IMAGES);
mImageReader.setOnImageAvailableListener(new MyOnImageAvailableListener(), mHandler);
return true;
}
private void startBackgroundThread() {
if (mBackgroundThread != null && mHandler != null) {
Log.d(TAG, "startBackgroundThread has already start a mBackgroundThread");
return;
}
mBackgroundThread = new HandlerThread("ExternalBackground");
mBackgroundThread.start();
mHandler = new Handler(mBackgroundThread.getLooper());
}
private class MyOnImageAvailableListener implements ImageReader.OnImageAvailableListener {
@Override
public void onImageAvailable(ImageReader reader) {
long startTime = System.currentTimeMillis();
Image image = null;
try {
image = reader.acquireLatestImage();
if (image == null){
Log.d(TAG, "onImageAvailable image is null");
return;
}
if (mBackgroundThread == null){
Log.d(TAG, "onImageAvailable mBackgroundThread is null");
return;
}
//The following is Cheetah's business logic; replace it with your own. Always release the
//image after processing, otherwise an OOM crash is very likely.
//pushYuv(image);
} catch (Exception e) {
e.printStackTrace();
} finally {
if (log_interval % 60 == 0){
Log.d(TAG, "onImageAvailable push image time:"+(System.currentTimeMillis() - startTime)+"ms");
}
log_interval++;
if (image != null) {
image.close();
}
}
}
}
private class MyHandler extends Handler{
MyHandler(Looper looper){
super(looper);
}
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
if (msg != null){
Map<Integer, byte[]> data = (Map<Integer, byte[]>) msg.obj;
if (mCallBack != null) {
mCallBack.onData(data);
}
}
}
}
private void stopBackgroundThread() {
if (mBackgroundThread == null) {
Log.i(TAG, "the background thread is null");
return;
}
Log.i(TAG, "stopBackgroundThread thread id:" + mBackgroundThread.getName() + "," + mBackgroundThread.getThreadId());
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join(50);
mBackgroundThread = null;
mHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void stopHandleThread() {
if (mImageHandleThread == null) {
Log.i(TAG, "the mImageHandleThread is null");
return;
}
mImageHandleThread.quitSafely();
try {
mImageHandleThread.join(50);
mImageHandleThread = null;
mImageHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void closeImageReader() {
Log.d(TAG, "closeImageReader == ");
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
Log.d(TAG, "closeImageReader mRemoteSurface release");
if (mRemoteSurface != null){
mRemoteSurface.release();
mRemoteSurface = null;
}
}
public void stopPushStream() {
if (mCurrentError == SurfaceShareError.ERROR_SURFACE_SHARE_USED){
Log.d(TAG, "stopPushStream ERROR_SURFACE_SHARE_USED .");
return;
}
SurfaceShareApi.getInstance().abandonImageFrame(mSurfaceShareBean);
stopBackgroundThread();
stopHandleThread();
closeImageReader();
mSurfaceShareBean = null;
mCurrentError = 0;
log_interval = 0;
}
public interface MessageCallBack{
void onData(Map<Integer, byte[]> bytes);
void onError(int error);
}
}
In OrionStar's robot code, the business logic processes the YUV image like this:
pushYuv(image);
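// pushYuv copies the Image planes into a packed YUV buffer for the remote stream plus an
// NV21 buffer for local preview, then posts both to the handler thread as a Map.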
private void pushYuv(@NonNull Image image) {
byte[] yuv = new byte[image.getHeight() * image.getWidth() * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
if (log_interval % 60 == 0){
Log.d(TAG, "pushYuv w:" + image.getWidth() + ",h:" + image.getHeight() + ",planes.len:" + image.getPlanes().length+", log_interval:"+log_interval);
}
readImageIntoBuffer(image, yuv);
// frameMirror(yuv, image.getWidth(), image.getHeight());
if (mImageHandler == null){
Log.d(TAG, "pushYuv mImageHandler is null");
return;
}
byte[] localPreviewData = ImageUtils.generateNV21Data(image);
Map<Integer, byte[]> dataMap = new HashMap<>();
dataMap.put(IMAGE_DATA_FOR_LIVEME, yuv);
dataMap.put(IMAGE_DATA_FOR_LOCAL_PREVIEW, localPreviewData);
Message.obtain(mImageHandler,0, dataMap).sendToTarget();
}
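// Converts a packed ARGB pixel array to I420 (all Y, then U, then V) using the
// standard integer RGB-to-YUV coefficients.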
public static void conver_argb_to_i420(byte[] i420, int[] argb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0; // Y start index
int uIndex = frameSize; // U start index
int vIndex = frameSize * 5 / 4; // V start index: w*h*5/4
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
a = (argb[index] & 0xff000000) >>> 24; // alpha, not used
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
// well known RGB to YUV algorithm
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
// I420(YUV420p) -> YYYYYYYY UU VV
i420[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && i % 2 == 0) {
i420[uIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
i420[vIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
}
index++;
}
}
}
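// Copies the Y/U/V planes of a YUV_420_888 Image into one contiguous buffer,
// honoring each plane's rowStride and pixelStride.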
private static void readImageIntoBuffer(Image image, byte[] data) {
int width = image.getWidth();
int height = image.getHeight();
Image.Plane[] planes = image.getPlanes();
if (planes == null) {
Log.i(TAG, "get image planes is null");
return;
}
int offset = 0;
int planeLen = planes.length;
for (int plane = 0; plane < planeLen; plane++) {
ByteBuffer buffer = planes[plane].getBuffer();
int rowStride = planes[plane].getRowStride();
int pixelStride = planes[plane].getPixelStride();
int planeWidth = plane == 0 ? width : width / 2;
int planeHeight = plane == 0 ? height : height / 2;
if ((pixelStride == 1) && (rowStride == planeWidth)) {
buffer.get(data, offset, planeWidth * planeHeight);
offset += planeWidth * planeHeight;
} else {
byte[] rowData = new byte[rowStride];
for (int row = 0; row < planeHeight - 1; row++) {
buffer.get(rowData, 0, rowStride);
for (int col = 0; col < planeWidth; col++) {
data[(offset++)] = rowData[(col * pixelStride)];
}
}
buffer.get(rowData, 0, Math.min(rowStride, buffer.remaining()));
for (int col = 0; col < planeWidth; col++) {
data[(offset++)] = rowData[(col * pixelStride)];
}
}
}
}
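// Mirrors each width-byte row of the buffer in place. Note: on NV21 chroma rows this
// reverses individual bytes, which also swaps the V/U order within each chroma pair.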
public static byte[] frameMirror(byte[] data, int width, int height) {
byte tempData;
for (int i = 0; i < height * 3 / 2; i++) {
for (int j = 0; j < width / 2; j++) {
tempData = data[i * width + j];
data[i * width + j] = data[(i + 1) * width - 1 - j];
data[(i + 1) * width - 1 - j] = tempData;
}
}
return data;
}
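// Converts a single-plane RGBA Image into a packed RGBA byte array by routing the
// pixels through an intermediate Bitmap that absorbs the row padding.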
public static byte[] readRgbaImageToBuffer(Image imageFrame) {
int w = imageFrame.getWidth();
int h = imageFrame.getHeight();
Image.Plane[] planes = imageFrame.getPlanes();
if (planes == null) {
Log.i(TAG, "get image planes is null");
return null;
}
ByteBuffer buffer = planes[0].getBuffer();
int pixelStride = planes[0].getPixelStride();
int rowStride = planes[0].getRowStride();
int rowPadding = rowStride - pixelStride * w;
Log.d(TAG, "readImageIntoBuffer: rowStride:" + rowStride + ",pixelStride:" + pixelStride + ",rowPadding:" + rowPadding);
Bitmap bitmap = Bitmap.createBitmap(w + rowPadding / pixelStride, h, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
byte[] pixels = new byte[w * h * 4]; // 4 bytes per pixel (RGBA)
int k = 0;
for (int x = 0; x < h; x++) {
for (int y = 0; y < w; y++) {
int color = bitmap.getPixel(y, x);
// rgba
pixels[k * 4 + 0] = (byte) Color.red(color);
pixels[k * 4 + 1] = (byte) Color.green(color);
pixels[k * 4 + 2] = (byte) Color.blue(color);
pixels[k * 4 + 3] = (byte) Color.alpha(color);
k++;
}
}
return pixels;
}
SurfaceShareDataEngine.java
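Finally, a minimal usage sketch of the engine above; the log tag and the rendering step are placeholders:
// Minimal usage sketch: start the shared stream, consume frames, stop when done.
SurfaceShareDataEngine engine = SurfaceShareDataEngine.getInstance();
engine.setMessageCallBack(new SurfaceShareDataEngine.MessageCallBack() {
    @Override
    public void onData(Map<Integer, byte[]> bytes) {
        // NV21 preview data (640x480); convert to a Bitmap before drawing to a SurfaceView.
        byte[] nv21 = bytes.get(SurfaceShareDataEngine.IMAGE_DATA_FOR_LOCAL_PREVIEW);
        // ... render nv21 ...
    }
    @Override
    public void onError(int error) {
        Log.e("SurfaceShareDemo", "share stream error: " + error);
    }
});
engine.startRequest();
// When leaving the page, always stop the stream to avoid OOM:
engine.stopPushStream();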