Streaming a CustomView with ARCore and Twilio Video

醉梦人生 · 2021-01-13 11:19

I have a problem when I want to stream a custom view with the Twilio Video API along with ARCore: basically it streams a black screen. I used the ViewCapturer class from the example to …

2 Answers
  • 2021-01-13 12:15

    For anyone who needs to stream ARCore with Twilio Video:

    In your ARCore renderer class:

    @Override
    public void onDrawFrame(GL10 gl) {
        // ...
        // glReadPixels must run on the GL thread, so the frame is grabbed while drawing.
        this.takeLastFrame();
    }

    // Called from the custom capturer as view.takeLastFrame(), so it cannot be private.
    // Uses OpenCV (org.opencv.core.Mat, Core, CvType) to reorient the raw GL readout.
    public byte[] takeLastFrame() {
        int height = this.mFrameHeight;
        int width = this.mFrameWidth;

        Mat input = new Mat(height, width, CvType.CV_8UC4);

        ByteBuffer buffer = ByteBuffer.allocate(input.rows() * input.cols() * input.channels());

        // Read the current RGBA framebuffer contents.
        GLES20.glReadPixels(0, 0, width, height,
                GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
        input.put(0, 0, buffer.array());

        // glReadPixels returns the image bottom-up; rotate and mirror to correct it.
        Core.rotate(input, input, Core.ROTATE_180);
        Core.flip(input, input, 1);

        return convertMatToBytes(input);
    }

    private byte[] convertMatToBytes(Mat image) {
        int bufferSize = image.channels() * image.cols() * image.rows();
        byte[] b = new byte[bufferSize];
        image.get(0, 0, b);
        return b;
    }
    

    In your custom capturer class:

    // "view" is the ARCore renderer above, which exposes takeLastFrame()
    byte[] array = view.takeLastFrame();
    if (array != null && array.length > 0) {
        final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
    
        // Create video frame
        VideoDimensions dimensions = new VideoDimensions(view.getFrameWidth(), view.getFrameHeight());
        VideoFrame videoFrame = new VideoFrame(array,
                            dimensions, VideoFrame.RotationAngle.ROTATION_0, captureTimeNs);
    
        // Notify the listener
        if (started.get()) {
            videoCapturerListener.onFrameCaptured(videoFrame);
        }
    }            
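
    For reference, here is a minimal Kotlin sketch of how such a capturer is typically attached to a track, assuming a pre-6.x Twilio Video Android SDK in which LocalVideoTrack.create still accepts a VideoCapturer; ArViewCapturer and ArRenderer below are placeholder names for your own classes:

    import android.content.Context
    import com.twilio.video.LocalVideoTrack

    // Hypothetical wiring: ArViewCapturer is the custom capturer above and ArRenderer
    // is the renderer ("view") that exposes takeLastFrame(); both names are placeholders.
    fun createArVideoTrack(context: Context, renderer: ArRenderer): LocalVideoTrack? {
        val capturer = ArViewCapturer(renderer)
        // With this SDK the track drives startCapture()/stopCapture() on the capturer.
        return LocalVideoTrack.create(context, true, capturer)
    }

    The returned track can then be passed to ConnectOptions.Builder.videoTracks() when connecting, or published later through LocalParticipant.publishTrack().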
    
  • 2021-01-13 12:20

    Modified the above code to stream at the closest supported VGA resolution, per the Twilio docs, and added the ability to swap surfaces (including setting the surface to null).

    package com.company.app
    
    import android.graphics.Bitmap
    import android.os.Handler
    import android.os.Looper
    import android.os.SystemClock
    import android.view.PixelCopy
    import android.view.SurfaceView
    import com.twilio.video.*
    import java.nio.ByteBuffer
    import java.util.concurrent.TimeUnit
    import java.util.concurrent.locks.ReentrantLock
    import kotlin.math.roundToLong
    
    class SurfaceViewCapturer : VideoCapturer {
        private var surfaceView: SurfaceView? = null
    
        private lateinit var viewBitmap: Bitmap
        private lateinit var videoCapturerListener: VideoCapturer.Listener
    
        private val handler = Handler(Looper.getMainLooper())
        private val handlerPixelCopy = Handler(Looper.getMainLooper())
        private var started: Boolean = false
    
        // Twilio selects closest supported VideoFormat to 640x480 at 30 frames per second.
        // https://media.twiliocdn.com/sdk/android/video/releases/1.0.0-beta17/docs/com/twilio/video/LocalVideoTrack.html
    
        private val framesPerSecond: Int = 30
        private val streamWidth: Int = VideoDimensions.VGA_VIDEO_WIDTH
        private val streamHeight: Int = VideoDimensions.VGA_VIDEO_HEIGHT
    
        private val viewCapturerFrameRateMs: Long =
            (TimeUnit.SECONDS.toMillis(1).toFloat() / framesPerSecond.toFloat()).roundToLong()
    
        private val reentrantLock = ReentrantLock()
    
        fun changeSurfaceView(surfaceView: SurfaceView?) {
            reentrantLock.lock()
            this.surfaceView = surfaceView
            reentrantLock.unlock()
        }
    
        private val viewCapturer: Runnable = object : Runnable {
            override fun run() {
                reentrantLock.lock()
    
                val surfaceView = surfaceView
    
                if (started.not()) {
                    reentrantLock.unlock()
                    return
                }
    
                if (surfaceView == null ||
                    surfaceView.width == 0 ||
                    surfaceView.height == 0 ||
                    surfaceView.holder.surface.isValid.not()
                ) {
                    handler.postDelayed(this, viewCapturerFrameRateMs)
                    reentrantLock.unlock()
                    return
                }
    
                // calculate frame width with fixed stream height while maintaining aspect ratio
                val frameWidthFixedHeight: Int = (surfaceView.width * streamHeight) / surfaceView.height
    
                // calculate frame height with fixed stream width while maintaining aspect ratio
                val frameHeightFixedWidth: Int = (surfaceView.height * streamWidth) / surfaceView.width
    
                // choose ratio that has more pixels
                val (frameWidth, frameHeight) =
                    if (frameWidthFixedHeight * streamHeight >= frameHeightFixedWidth * streamWidth) {
                        Pair(frameWidthFixedHeight, streamHeight)
                    } else {
                        Pair(streamWidth, frameHeightFixedWidth)
                    }
    
                viewBitmap = Bitmap.createBitmap(frameWidth, frameHeight, Bitmap.Config.ARGB_8888)
    
                // mutex.unlock() happens in callback
                PixelCopy.request(
                    surfaceView,
                    viewBitmap,
                    { copyResult ->
                        // Only deliver a frame when the pixel copy succeeded; otherwise
                        // just schedule the next attempt.
                        if (copyResult == PixelCopy.SUCCESS) {
                            val buffer = ByteBuffer.allocate(viewBitmap.byteCount)
                            viewBitmap.copyPixelsToBuffer(buffer)

                            // Create video frame
                            val dimensions = VideoDimensions(frameWidth, frameHeight)

                            val videoFrame =
                                VideoFrame(
                                    buffer.array(),
                                    dimensions,
                                    VideoFrame.RotationAngle.ROTATION_0,
                                    TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime())
                                )

                            // Notify the listener
                            videoCapturerListener.onFrameCaptured(videoFrame)
                        }
                        handler.postDelayed(this, viewCapturerFrameRateMs)
                        reentrantLock.unlock()
                    },
                    handlerPixelCopy
                )
            }
        }
    
        override fun getSupportedFormats(): List<VideoFormat> =
            listOf(
                VideoFormat(
                    VideoDimensions(streamWidth, streamHeight),
                    framesPerSecond,
                    VideoPixelFormat.RGBA_8888
                )
            )
    
        override fun isScreencast(): Boolean {
            return true
        }
    
        override fun startCapture(
            captureFormat: VideoFormat,
            capturerListener: VideoCapturer.Listener
        ) {
            reentrantLock.lock()
            // Store the capturer listener
            videoCapturerListener = capturerListener
            started = true
    
            // Notify capturer API that the capturer has started
            val capturerStarted = handler.postDelayed(viewCapturer, viewCapturerFrameRateMs)
            videoCapturerListener.onCapturerStarted(capturerStarted)
            reentrantLock.unlock()
        }
    
        /**
         * Stop capturing frames. Note that the SDK cannot receive frames once this has been invoked.
         */
        override fun stopCapture() {
            reentrantLock.lock()
            started = false
            handler.removeCallbacks(viewCapturer)
            reentrantLock.unlock()
        }
    }
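
    A short usage sketch for the class above, under the same assumption of a pre-6.x VideoCapturer-based Twilio SDK; arSurfaceView stands for whatever SurfaceView ARCore renders into (a GLSurfaceView is a SurfaceView subclass, so PixelCopy can read from it):

    import android.content.Context
    import android.view.SurfaceView
    import com.twilio.video.LocalVideoTrack

    // Minimal sketch: point the capturer at the AR surface and build a local track from it.
    fun startStreaming(context: Context, arSurfaceView: SurfaceView): LocalVideoTrack? {
        val capturer = SurfaceViewCapturer()
        capturer.changeSurfaceView(arSurfaceView)   // can be swapped later, or set to null
        // The SDK calls startCapture() on the capturer once the track is created.
        return LocalVideoTrack.create(context, true, capturer)
    }

    Note that passing null to changeSurfaceView() does not stop the capturer: the capture Runnable keeps rescheduling itself and resumes delivering frames as soon as a valid surface is set again.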
    