Created July 24, 2018 10:51
package it.inrebus.skyte.rtcclient;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.opengl.GLES20;
import android.opengl.GLUtils;

import org.webrtc.SurfaceTextureHelper;
import org.webrtc.TextureBufferImpl;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;
import org.webrtc.YuvConverter;

// Custom VideoCapturer that feeds WebRTC with frames drawn onto an Android Bitmap.
public class CustomCapturer implements VideoCapturer {

    private SurfaceTextureHelper surTexture;
    private Context appContext;
    private org.webrtc.CapturerObserver capturerObs;
    private Thread captureThread;

    @Override
    public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext, org.webrtc.CapturerObserver capturerObserver) {
        surTexture = surfaceTextureHelper;
        appContext = applicationContext;
        capturerObs = capturerObserver;
    }

    @Override
    public void startCapture(int width, int height, int fps) {
        captureThread = new Thread(() -> {
            try {
                long start = System.nanoTime();
                capturerObs.onCapturerStarted(true);

                // Allocate one OpenGL texture that will hold the bitmap contents.
                int[] textures = new int[1];
                GLES20.glGenTextures(1, textures, 0);

                YuvConverter yuvConverter = new YuvConverter();
                TextureBufferImpl buffer = new TextureBufferImpl(width, height, VideoFrame.TextureBuffer.Type.RGB,
                        textures[0], new Matrix(), surTexture.getHandler(), yuvConverter, null);
                Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);

                while (true) {
                    // Draw a white background with a red square into the bitmap.
                    Canvas canvas = new Canvas(bitmap);
                    canvas.drawRGB(255, 255, 255);
                    Paint paint = new Paint();
                    paint.setColor(Color.RED);
                    paint.setAlpha(0xff);
                    canvas.drawRect(100, 100, 200, 200, paint);

                    // Upload the bitmap on the helper's GL thread, convert it to I420
                    // and hand the resulting frame to the observer.
                    surTexture.getHandler().post(() -> {
                        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
                        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
                        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);

                        VideoFrame.I420Buffer i420Buf = yuvConverter.convert(buffer);
                        long frameTime = System.nanoTime() - start;
                        VideoFrame videoFrame = new VideoFrame(i420Buf, 0, frameTime);
                        capturerObs.onFrameCaptured(videoFrame);
                    });

                    // Roughly 10 fps; the fps parameter is not honored here.
                    Thread.sleep(100);
                }
            } catch (InterruptedException ex) {
                ex.printStackTrace();
            }
        });
        captureThread.start();
    }

    @Override
    public void stopCapture() {
        captureThread.interrupt();
    }

    @Override
    public void changeCaptureFormat(int width, int height, int fps) {
    }

    @Override
    public void dispose() {
    }

    @Override
    public boolean isScreencast() {
        return false;
    }
}
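For context, a hedged sketch of how a capturer like this is typically wired into a PeerConnectionFactory; the factory, eglBase and applicationContext variables here are assumptions, not part of the gist:

    // Shared EGL context so the capturer's texture is visible to the encoder.
    SurfaceTextureHelper helper = SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
    CustomCapturer capturer = new CustomCapturer();
    VideoSource videoSource = factory.createVideoSource(capturer.isScreencast());
    capturer.initialize(helper, applicationContext, videoSource.getCapturerObserver());
    capturer.startCapture(640, 480, 10);
    VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource);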
It works.
But you should release the videoFrame object after it has been used, otherwise it will cause a memory leak.
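A minimal sketch of that fix against the runnable above: the sinks retain the frame if they still need it, so the capturer can drop its own reference right after handing it over.

    VideoFrame videoFrame = new VideoFrame(i420Buf, 0, frameTime);
    capturerObs.onFrameCaptured(videoFrame);
    // Releasing here also releases the underlying I420 buffer once no sink holds it.
    videoFrame.release();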
Note: OpenGL has a flipped y-coordinate system compared to Android bitmaps, so you will need to pass a transform matrix that scales by (1, -1) to correct for this.
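For example, a sketch of the constructor call above with the flip baked into the texture transform (assuming android.graphics.Matrix, as in the imports, is the intended matrix type):

    // Scale by (1, -1) so the bitmap's top-left origin matches OpenGL's bottom-left origin.
    Matrix flip = new Matrix();
    flip.preScale(1f, -1f);
    TextureBufferImpl buffer = new TextureBufferImpl(width, height, VideoFrame.TextureBuffer.Type.RGB,
            textures[0], flip, surTexture.getHandler(), yuvConverter, null);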
I tried this out, but had some additional issues with tearing and framerate. I ended up using the SurfaceTextureHelper passed to initialize
to draw bitmaps to its SurfaceTexture, and it will convert the frames for you.
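A rough sketch of that approach, assuming a recent WebRTC Android SDK where SurfaceTextureHelper exposes setTextureSize and startListening(VideoSink), and reusing the width, height, fps and bitmap from startCapture above (it also needs an import of android.view.Surface); the helper turns whatever is drawn onto its SurfaceTexture into texture frames:

    // Inside startCapture(): let the helper own the GL texture and frame timestamps.
    surTexture.setTextureSize(width, height);
    surTexture.startListening(frame -> capturerObs.onFrameCaptured(frame));
    capturerObs.onCapturerStarted(true);

    Surface surface = new Surface(surTexture.getSurfaceTexture());
    captureThread = new Thread(() -> {
        try {
            while (!Thread.interrupted()) {
                // Draw the bitmap into the SurfaceTexture through an ordinary Canvas;
                // the helper converts and delivers the frame on its own thread.
                Canvas canvas = surface.lockCanvas(null);
                canvas.drawBitmap(bitmap, 0f, 0f, null);
                surface.unlockCanvasAndPost(canvas);
                Thread.sleep(1000 / fps);
            }
        } catch (InterruptedException ignored) {
        }
    });
    captureThread.start();

With this variant, stopCapture() should also call surTexture.stopListening() in addition to interrupting the thread.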
I want to do the same thing as you: I want to transmit UAV video data over WebRTC. Can I use this method?