
Camera data stream sharing

The Vision SDK that provides robot vision requires the camera, so if a secondary-development application occupies the camera directly, robot face detection and recognition become unavailable. To use camera data without affecting the robot's vision features, obtain it as a shared data stream from the Vision SDK. Camera data is acquired through SurfaceShareApi, which has three key methods:

  1. startRequest(): starts the shared data stream
  2. onImageAvailable(ImageReader reader): the callback through which every image frame in the shared stream is delivered
  3. stopPushStream(): stops the shared data stream

Obtaining image data through the shared stream carries high memory and CPU overhead, so be sure to stop the stream whenever it is not in use to avoid out-of-memory (OOM) crashes. Frames from the shared stream arrive in YUV format; to render them to a SurfaceView they must first be converted to a Bitmap.
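For example, NV21 bytes (the format produced by the ImageUtils.generateNV21Data helper used later on this page) can be turned into a Bitmap through the standard android.graphics.YuvImage route. The following is a minimal sketch, assuming an NV21 buffer of known dimensions; the YuvUtils class name is ours, and the JPEG round trip is simple but not the fastest path for per-frame rendering:

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;

import java.io.ByteArrayOutputStream;

public class YuvUtils {

    // Converts an NV21 frame to a Bitmap by compressing it to an in-memory
    // JPEG and decoding it back (YuvImage only accepts NV21 or YUY2 input).
    public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(new Rect(0, 0, width, height), 90, out);
        byte[] jpeg = out.toByteArray();
        return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
    }
}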

Please refer to the following code for details:

package com.ainirobot.videocall.control.sufaceshareengine;

import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.support.annotation.NonNull;
import android.util.Log;
import android.view.Surface;

import com.ainirobot.coreservice.client.surfaceshare.SurfaceShareApi;
import com.ainirobot.coreservice.client.surfaceshare.SurfaceShareBean;
import com.ainirobot.coreservice.client.surfaceshare.SurfaceShareError;
import com.ainirobot.coreservice.client.surfaceshare.SurfaceShareListener;
import com.ainirobot.videocall.tools.ImageUtils;

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

/**
 * Obtain video data through the Vision SDK
 */
public class SurfaceShareDataEngine {

    private static final String TAG = "SurfaceShareDataEngine";
    public static final int IMAGE_DATA_FOR_LIVEME = 0;
    public static final int IMAGE_DATA_FOR_LOCAL_PREVIEW = 1;

    public static final int VISION_IMAGE_WIDTH = 640;
    public static final int VISION_IMAGE_HEIGHT = 480;
    private static final int MAX_CACHE_IMAGES = 4; // keep maxImages consistent with the Vision SDK

    private MessageCallBack mCallBack;
    private ImageReader mImageReader;

    private Handler mHandler;
    private HandlerThread mBackgroundThread;
    private SurfaceShareBean mSurfaceShareBean;
    private int mCurrentError = 0;
    private long log_interval = 0;
    private MyHandler mImageHandler;
    private HandlerThread mImageHandleThread;
    private Surface mRemoteSurface;

    private static class InstanceHolder {
        public static final SurfaceShareDataEngine instance = new SurfaceShareDataEngine();
    }

    public static SurfaceShareDataEngine getInstance() {
        return InstanceHolder.instance;
    }

    private SurfaceShareDataEngine() {}

    public void setMessageCallBack(MessageCallBack callBack){
        Log.i(TAG, "set message callBack: " + callBack);
        this.mCallBack = callBack;
    }

    private boolean initSurfaceResource() {
        Log.d(TAG, "initSurfaceResource ..");
        startBackgroundThread();
        boolean init = initImageSurface();
        if (!init) {
            Log.e(TAG, "Remote Surface instance has been used");
            return false;
        }
        startHandleImageThread();
        return true;
    }

    private void startHandleImageThread() {
        mImageHandleThread = new HandlerThread("ImageHandleThread");
        mImageHandleThread.start();
        mImageHandler = new MyHandler(mImageHandleThread.getLooper());
    }

    public void startRequest() {
        initSurfaceResource();
        if (mSurfaceShareBean == null) {
            mSurfaceShareBean = new SurfaceShareBean();
            mSurfaceShareBean.setName("VideoCall");
        }

        mRemoteSurface = mImageReader.getSurface();
        Log.d(TAG, "startRequest getSurface:" + mRemoteSurface);
        SurfaceShareApi.getInstance().requestImageFrame(mRemoteSurface, mSurfaceShareBean, new SurfaceShareListener() {

            @Override
            public void onError(int error, String message) {
                super.onError(error, message);
                Log.d(TAG, "requestImageFrame onError error :" + error);
                mCurrentError = error;
                if (mCallBack != null) {
                    mCallBack.onError(error);
                }
            }

            @Override
            public void onStatusUpdate(int status, String message) {
                super.onStatusUpdate(status, message);
                Log.d(TAG, "requestImageFrame onStatusUpdate status:" + status);
            }
        });
    }

    private boolean initImageSurface() {
        if (mImageReader != null) {
            Log.d(TAG, "initImageSurface has an ImageReader instance already!");
            return false;
        }
        mImageReader = ImageReader.newInstance(VISION_IMAGE_WIDTH, VISION_IMAGE_HEIGHT, ImageFormat.YUV_420_888, MAX_CACHE_IMAGES);
        mImageReader.setOnImageAvailableListener(new MyOnImageAvailableListener(), mHandler);
        return true;
    }

    private void startBackgroundThread() {
        if (mBackgroundThread != null && mHandler != null) {
            Log.d(TAG, "startBackgroundThread has already start a mBackgroundThread");
            return;
        }
        mBackgroundThread = new HandlerThread("ExternalBackground");
        mBackgroundThread.start();
        mHandler = new Handler(mBackgroundThread.getLooper());
    }

    private class MyOnImageAvailableListener implements ImageReader.OnImageAvailableListener {

        @Override
        public void onImageAvailable(ImageReader reader) {
            long startTime = System.currentTimeMillis();
            Image image = null;
            try {
                image = reader.acquireLatestImage();
                if (image == null){
                    Log.d(TAG, "onImageAvailable image is null");
                    return;
                }
                if (mBackgroundThread == null){
                    Log.d(TAG, "onImageAvailable mBackgroundThread is null");
                    return;
                }
                // The following is OrionStar's business logic; replace it with your own here.
                // Be sure to close the image after processing, otherwise OOM crashes can easily occur.
                //pushYuv(image);

            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                if (log_interval % 60 == 0){
                    Log.d(TAG, "onImageAvailable push image time:"+(System.currentTimeMillis() - startTime)+"ms");
                }
                log_interval++;

                if (image != null) {
                    image.close();
                }
            }
        }
    }

    private class MyHandler extends Handler{

        MyHandler(Looper looper){
            super(looper);
        }

        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            if (msg != null){
                Map<Integer, byte[]> data = (Map<Integer, byte[]>) msg.obj;
                if (mCallBack != null) {
                    mCallBack.onData(data);
                }
            }
        }
    }

    private void stopBackgroundThread() {
        if (mBackgroundThread == null) {
            Log.i(TAG, "the background thread is null");
            return;
        }

        Log.i(TAG, "stopBackgroundThread thread id:" + mBackgroundThread.getName() + "," + mBackgroundThread.getThreadId());
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join(50);
            mBackgroundThread = null;
            mHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    private void stopHandleThread() {
        if (mImageHandleThread == null) {
            Log.i(TAG, "the mImageHandleThread is null");
            return;
        }
        mImageHandleThread.quitSafely();
        try {
            mImageHandleThread.join(50);
            mImageHandleThread = null;
            mImageHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    private void closeImageReader() {
        Log.d(TAG, "closeImageReader == ");
        if (mImageReader != null) {
            mImageReader.close();
            mImageReader = null;
        }
        Log.d(TAG, "closeImageReader mRemoteSurface release");
        if (mRemoteSurface != null){
            mRemoteSurface.release();
            mRemoteSurface = null;
        }
    }

    public void stopPushStream() {
        if (mCurrentError == SurfaceShareError.ERROR_SURFACE_SHARE_USED){
            Log.d(TAG, "stopPushStream ERROR_SURFACE_SHARE_USED .");
            return;
        }
        SurfaceShareApi.getInstance().abandonImageFrame(mSurfaceShareBean);
        stopBackgroundThread();
        stopHandleThread();
        closeImageReader();
        mSurfaceShareBean = null;
        mCurrentError = 0;
        log_interval = 0;
    }

    public interface MessageCallBack{
        void onData(Map<Integer, byte[]> bytes);
        void onError(int error);
    }
}
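
Before the OrionStar-specific logic, here is a minimal usage sketch of the engine (a hypothetical call site; it assumes the pushYuv(image) call in onImageAvailable has been re-enabled so that onData actually fires):

SurfaceShareDataEngine engine = SurfaceShareDataEngine.getInstance();
engine.setMessageCallBack(new SurfaceShareDataEngine.MessageCallBack() {
    @Override
    public void onData(Map<Integer, byte[]> bytes) {
        // IMAGE_DATA_FOR_LOCAL_PREVIEW -> NV21 bytes for local rendering;
        // IMAGE_DATA_FOR_LIVEME -> planar I420 bytes for the call stream.
        // Called on the image-handling thread; post to the UI thread to draw.
    }

    @Override
    public void onError(int error) {
        // e.g. SurfaceShareError.ERROR_SURFACE_SHARE_USED when the stream is occupied.
    }
});
engine.startRequest();

// When the stream is no longer needed, always stop it to free memory and CPU:
engine.stopPushStream();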

In OrionStar's own robot code, the business logic invoked from the callback (the pushYuv(image) call shown commented out above) is as follows:

    pushYuv(image);



    private void pushYuv(@NonNull Image image) {
        byte[] yuv = new byte[image.getHeight() * image.getWidth() * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
        if (log_interval % 60 == 0){
            Log.d(TAG, "pushYuv w:" + image.getWidth() + ",h:" + image.getHeight() + ",planes.len:" + image.getPlanes().length+", log_interval:"+log_interval);
        }
        readImageIntoBuffer(image, yuv);

//        frameMirror(yuv, image.getWidth(), image.getHeight());

        if (mImageHandler == null){
            Log.d(TAG, "pushYuv mImageHandler is null");
            return;
        }

        byte[] localPreviewData = ImageUtils.generateNV21Data(image);

        Map<Integer, byte[]> dataMap = new HashMap<>();
        dataMap.put(IMAGE_DATA_FOR_LIVEME, yuv);
        dataMap.put(IMAGE_DATA_FOR_LOCAL_PREVIEW, localPreviewData);
        Message.obtain(mImageHandler,0, dataMap).sendToTarget();
    }


    public static void conver_argb_to_i420(byte[] i420, int[] argb, int width, int height) {
        final int frameSize = width * height;

        int yIndex = 0;                   // Y start index
        int uIndex = frameSize;           // U start index
        int vIndex = frameSize * 5 / 4;   // V start index: w*h*5/4

        int a, R, G, B, Y, U, V;
        int index = 0;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                // argb holds one packed ARGB_8888 pixel per int
                a = (argb[index] & 0xff000000) >>> 24; // alpha is not used
                R = (argb[index] & 0xff0000) >> 16;
                G = (argb[index] & 0xff00) >> 8;
                B = (argb[index] & 0xff);

                // well known RGB to YUV algorithm
                Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

                // I420(YUV420p) -> YYYYYYYY UU VV
                i420[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
                if (j % 2 == 0 && i % 2 == 0) {
                    i420[uIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
                    i420[vIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                }
                index++;
            }
        }
    }
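
A hypothetical call site for the converter above (the source Bitmap variable is our assumption) packs the pixels with Bitmap.getPixels and sizes the I420 buffer at width * height * 3 / 2 bytes:

    // `source` is assumed to be an ARGB_8888 Bitmap.
    int w = source.getWidth();
    int h = source.getHeight();
    int[] argb = new int[w * h];
    source.getPixels(argb, 0, w, 0, 0, w, h);
    byte[] i420 = new byte[w * h * 3 / 2];
    conver_argb_to_i420(i420, argb, w, h);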

    private static void readImageIntoBuffer(Image image, byte[] data) {
        int width = image.getWidth();
        int height = image.getHeight();
        Image.Plane[] planes = image.getPlanes();
        if (planes == null) {
            Log.i(TAG, "get image planes is null");
            return;
        }
        int offset = 0;
        int planeLen = planes.length;
        for (int plane = 0; plane < planeLen; plane++) {
            ByteBuffer buffer = planes[plane].getBuffer();
            int rowStride = planes[plane].getRowStride();
            int pixelStride = planes[plane].getPixelStride();
            int planeWidth = plane == 0 ? width : width / 2;
            int planeHeight = plane == 0 ? height : height / 2;
            if ((pixelStride == 1) && (rowStride == planeWidth)) {
                buffer.get(data, offset, planeWidth * planeHeight);
                offset += planeWidth * planeHeight;
            } else {
                byte[] rowData = new byte[rowStride];
                for (int row = 0; row < planeHeight - 1; row++) {
                    buffer.get(rowData, 0, rowStride);
                    for (int col = 0; col < planeWidth; col++) {
                        data[(offset++)] = rowData[(col * pixelStride)];
                    }
                }
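                // The final row may be shorter than rowStride due to padding,
                // so read only the bytes remaining in the buffer.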
                buffer.get(rowData, 0, Math.min(rowStride, buffer.remaining()));
                for (int col = 0; col < planeWidth; col++) {
                    data[(offset++)] = rowData[(col * pixelStride)];
                }
            }
        }
    }

    // Horizontally mirrors a planar I420 (YUV420p) frame in place, matching the
    // buffer layout produced by readImageIntoBuffer: Y rows are width bytes;
    // U and V rows are width / 2 bytes each.
    public static byte[] frameMirror(byte[] data, int width, int height) {
        byte tempData;
        for (int i = 0; i < height; i++) { // Y plane
            for (int j = 0; j < width / 2; j++) {
                tempData = data[i * width + j];
                data[i * width + j] = data[(i + 1) * width - 1 - j];
                data[(i + 1) * width - 1 - j] = tempData;
            }
        }
        int chromaWidth = width / 2;
        int base = width * height;
        for (int i = 0; i < height; i++) { // U plane rows, then V plane rows
            int rowStart = base + i * chromaWidth;
            for (int j = 0; j < chromaWidth / 2; j++) {
                tempData = data[rowStart + j];
                data[rowStart + j] = data[rowStart + chromaWidth - 1 - j];
                data[rowStart + chromaWidth - 1 - j] = tempData;
            }
        }
        return data;
    }

    public static byte[] readRgbaImageToBuffer(Image imageFrame) {

        int w = imageFrame.getWidth();
        int h = imageFrame.getHeight();
        Image.Plane[] planes = imageFrame.getPlanes();
        if (planes == null) {
            Log.i(TAG, "get image planes is null");
            return null;
        }
        ByteBuffer buffer = planes[0].getBuffer();
        int pixelStride = planes[0].getPixelStride();
        int rowStride = planes[0].getRowStride();
        int rowPadding = rowStride - pixelStride * w;
        Log.d(TAG, "readImageIntoBuffer: rowStride:" + rowStride + ",pixelStride:" + pixelStride + ",rowPadding:" + rowPadding);
        Bitmap bitmap = Bitmap.createBitmap(w + rowPadding / pixelStride, h, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(buffer);


        byte[] pixels = new byte[w * h * 4]; // RGBA: 4 bytes per pixel

        int k = 0;

        for (int x = 0; x < h; x++) {
            for (int y = 0; y < w; y++) {
                int color = bitmap.getPixel(y, x);
                // rgba
                pixels[k * 4 + 0] = (byte) Color.red(color);
                pixels[k * 4 + 1] = (byte) Color.green(color);
                pixels[k * 4 + 2] = (byte) Color.blue(color);
                pixels[k * 4 + 3] = (byte) Color.alpha(color);
                k++;
            }
        }

        return pixels;
    }

SurfaceShareDataEngine.java