Skip to content
This repository has been archived by the owner on Nov 12, 2019. It is now read-only.

Commit

Permalink
add Google ScreenCapture
Browse files Browse the repository at this point in the history
  • Loading branch information
RWebRTC committed Nov 23, 2016
1 parent bb2f66a commit d7a73d9
Show file tree
Hide file tree
Showing 50 changed files with 3,075 additions and 2,469 deletions.
3 changes: 2 additions & 1 deletion RAppRTC-libs-src/README-api
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@ This directory holds a Java implementation of the webrtc::PeerConnection API, as
well as the JNI glue C++ code that lets the Java implementation reuse the C++
implementation of the same API.

To build the Java API and related tests, build with OS=android in $GYP_DEFINES.
To build the Java API and related tests, generate GN projects with:
--args='target_os="android"'

To use the Java API, start by looking at the public interface of
org.webrtc.PeerConnection{,Factory} and the org.webrtc.PeerConnectionTest.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,10 @@

package org.webrtc;

import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

import android.os.SystemClock;

import java.util.ArrayList;
import java.util.List;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

@SuppressWarnings("deprecation")
public class Camera1Enumerator implements CameraEnumerator {
Expand Down Expand Up @@ -89,7 +87,7 @@ private static android.hardware.Camera.CameraInfo getCameraInfo(int index) {
static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
if (cachedSupportedFormats == null) {
cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
for (int i = 0; i < CameraEnumerationAndroid.getDeviceCount(); ++i) {
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
cachedSupportedFormats.add(enumerateFormats(i));
}
}
Expand Down Expand Up @@ -164,7 +162,7 @@ static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRan
static int getCameraIndex(String deviceName) {
Logging.d(TAG, "getCameraIndex: " + deviceName);
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) {
if (deviceName.equals(getDeviceName(i))) {
return i;
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,19 +10,17 @@

package org.webrtc;

import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.Metrics.Histogram;

import android.content.Context;
import android.os.Handler;
import android.os.SystemClock;
import android.view.Surface;
import android.view.WindowManager;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.Metrics.Histogram;

@SuppressWarnings("deprecation")
public class Camera1Session implements CameraSession {
Expand All @@ -33,6 +31,8 @@ public class Camera1Session implements CameraSession {
Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
private static final Histogram camera1StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
"WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());

private static enum SessionState { RUNNING, STOPPED }

Expand Down Expand Up @@ -138,6 +138,7 @@ private static CaptureFormat findClosestCaptureFormat(

final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);

return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,6 @@

package org.webrtc;

import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.Metrics.Histogram;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
Expand All @@ -28,10 +25,11 @@
import android.util.Range;
import android.view.Surface;
import android.view.WindowManager;

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.Metrics.Histogram;

@TargetApi(21)
public class Camera2Session implements CameraSession {
Expand All @@ -41,6 +39,8 @@ public class Camera2Session implements CameraSession {
Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
private static final Histogram camera2StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
"WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());

private static enum SessionState { RUNNING, STOPPED }

Expand Down Expand Up @@ -345,6 +345,7 @@ private void findCaptureFormat() {
CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);

final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);

captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
Logging.d(TAG, "Using capture format: " + captureFormat);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,41 @@
import static java.lang.Math.abs;

import android.graphics.ImageFormat;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.webrtc.Metrics.Histogram;

@SuppressWarnings("deprecation")
public class CameraEnumerationAndroid {
private final static String TAG = "CameraEnumerationAndroid";

static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
// 0, Unknown resolution
new Size(160, 120), // 1, QQVGA
new Size(240, 160), // 2, HQVGA
new Size(320, 240), // 3, QVGA
new Size(400, 240), // 4, WQVGA
new Size(480, 320), // 5, HVGA
new Size(640, 360), // 6, nHD
new Size(640, 480), // 7, VGA
new Size(768, 480), // 8, WVGA
new Size(854, 480), // 9, FWVGA
new Size(800, 600), // 10, SVGA
new Size(960, 540), // 11, qHD
new Size(960, 640), // 12, DVGA
new Size(1024, 576), // 13, WSVGA
new Size(1024, 600), // 14, WSVGA
new Size(1280, 720), // 15, HD
new Size(1280, 1024), // 16, SXGA
new Size(1920, 1080), // 17, Full HD
new Size(1920, 1440), // 18, Full HD 4:3
new Size(2560, 1440), // 19, QHD
new Size(3840, 2160) // 20, UHD
));

public static class CaptureFormat {
// Class to represent a framerate range. The framerate varies because of lighting conditions.
// The values are multiplied by 1000, so 1000 represents one frame per second.
Expand Down Expand Up @@ -114,51 +140,6 @@ public int hashCode() {
}
}

/**
* @deprecated
* Please use Camera1Enumerator.getDeviceNames() instead.
*/
@Deprecated
public static String[] getDeviceNames() {
return new Camera1Enumerator().getDeviceNames();
}

/**
* @deprecated
* Please use Camera1Enumerator.getDeviceNames().length instead.
*/
@Deprecated
public static int getDeviceCount() {
return new Camera1Enumerator().getDeviceNames().length;
}

/**
* @deprecated
* Please use Camera1Enumerator.getDeviceNames().get(index) instead.
*/
@Deprecated
public static String getDeviceName(int index) {
return new Camera1Enumerator().getDeviceName(index);
}

/**
* @deprecated
* Please use Camera1Enumerator.isFrontFacing(String deviceName) instead.
*/
@Deprecated
public static String getNameOfFrontFacingDevice() {
return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
}

/**
* @deprecated
* Please use Camera1Enumerator.isBackFacing(String deviceName) instead.
*/
@Deprecated
public static String getNameOfBackFacingDevice() {
return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
}

// Helper class for finding the closest supported format for the two functions below. It creates a
// comparator based on the difference to some requested parameters, where the element with the
// minimum difference is the element that is closest to the requested parameters.
Expand Down Expand Up @@ -216,18 +197,11 @@ int diff(Size size) {
});
}

private static String getNameOfDevice(int facing) {
final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
try {
android.hardware.Camera.getCameraInfo(i, info);
if (info.facing == facing) {
return getDeviceName(i);
}
} catch (Exception e) {
Logging.e(TAG, "getCameraInfo() failed on index " + i, e);
}
}
return null;
// Helper method for camera classes: records which well-known resolution a
// capture session ended up using, as a sample in an enumerated histogram.
static void reportCameraResolution(Histogram histogram, Size resolution) {
  // COMMON_RESOLUTIONS.indexOf() yields -1 for resolutions not in the table.
  // Bucket 0 is reserved for "unknown resolution", so shifting by +1 maps
  // unknown sizes to 0 and known sizes to buckets 1..N.
  final int bucket = COMMON_RESOLUTIONS.indexOf(resolution) + 1;
  histogram.addSample(bucket);
}
}
32 changes: 21 additions & 11 deletions RAppRTC-libs-src/libjingle_peerconnection_java-src/EglRenderer.java
Original file line number Diff line number Diff line change
Expand Up @@ -435,8 +435,7 @@ public void renderFrame(VideoRenderer.I420Frame frame) {
/**
* Release EGL surface. This function will block until the EGL surface is released.
*/
public void releaseEglSurface() {
final CountDownLatch completionLatch = new CountDownLatch(1);
public void releaseEglSurface(final Runnable completionCallback) {
// Ensure that the render thread is no longer touching the Surface before returning from this
// function.
eglSurfaceCreationRunnable.setSurface(null /* surface */);
Expand All @@ -450,14 +449,13 @@ public void run() {
eglBase.detachCurrent();
eglBase.releaseSurface();
}
completionLatch.countDown();
completionCallback.run();
}
});
} else {
completionLatch.countDown();
return;
}
}
ThreadUtils.awaitUninterruptibly(completionLatch);
completionCallback.run();
}

/**
Expand Down Expand Up @@ -535,6 +533,8 @@ private void renderFrameOnRenderThread() {
// After a surface size change, the EGLSurface might still have a buffer of the old size in the
// pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet
// changed. Such a buffer will be rendered incorrectly, so flush it with a black frame.
final int drawnFrameWidth;
final int drawnFrameHeight;
synchronized (layoutLock) {
int surfaceClearCount = 0;
while (eglBase.surfaceWidth() != surfaceWidth || eglBase.surfaceHeight() != surfaceHeight) {
Expand All @@ -550,11 +550,20 @@ private void renderFrameOnRenderThread() {
}
final float[] layoutMatrix;
if (layoutAspectRatio > 0) {
layoutMatrix = RendererCommon.getLayoutMatrix(
mirror, frame.rotatedWidth() / (float) frame.rotatedHeight(), layoutAspectRatio);
final float frameAspectRatio = frame.rotatedWidth() / (float) frame.rotatedHeight();
layoutMatrix = RendererCommon.getLayoutMatrix(mirror, frameAspectRatio, layoutAspectRatio);
if (frameAspectRatio > layoutAspectRatio) {
drawnFrameWidth = (int) (frame.rotatedHeight() * layoutAspectRatio);
drawnFrameHeight = frame.rotatedHeight();
} else {
drawnFrameWidth = frame.rotatedWidth();
drawnFrameHeight = (int) (frame.rotatedWidth() / layoutAspectRatio);
}
} else {
layoutMatrix =
mirror ? RendererCommon.horizontalFlipMatrix() : RendererCommon.identityMatrix();
drawnFrameWidth = frame.rotatedWidth();
drawnFrameHeight = frame.rotatedHeight();
}
drawMatrix = RendererCommon.multiplyMatrices(texMatrix, layoutMatrix);
}
Expand All @@ -569,12 +578,13 @@ private void renderFrameOnRenderThread() {
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
}
}

yuvUploader.uploadYuvData(
yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
drawer.drawYuv(yuvTextures, drawMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
surfaceWidth, surfaceHeight);
drawer.drawYuv(yuvTextures, drawMatrix, drawnFrameWidth, drawnFrameHeight, 0, 0, surfaceWidth,
surfaceHeight);
} else {
drawer.drawOes(frame.textureId, drawMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
drawer.drawOes(frame.textureId, drawMatrix, drawnFrameWidth, drawnFrameHeight, 0, 0,
surfaceWidth, surfaceHeight);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,11 +81,16 @@ static public Histogram createCounts(String name, int min, int max, int bucketCo
return new Histogram(nativeCreateCounts(name, min, max, bucketCount), name);
}

// Creates an enumerated histogram with buckets 0..max, backed by a native
// histogram object identified by the returned wrapper's handle.
static public Histogram createEnumeration(String name, int max) {
  final long handle = nativeCreateEnumeration(name, max);
  return new Histogram(handle, name);
}

// Records one sample value into the underlying native histogram referenced
// by |handle|.
public void addSample(int sample) {
nativeAddSample(handle, sample);
}

private static native long nativeCreateCounts(String name, int min, int max, int bucketCount);
private static native long nativeCreateEnumeration(String name, int max);
private static native void nativeAddSample(long handle, int sample);
}

Expand Down
2 changes: 0 additions & 2 deletions RAppRTC-libs-src/libjingle_peerconnection_java-src/OWNERS
Original file line number Diff line number Diff line change
@@ -1,4 +1,2 @@
[email protected]

per-file Camera*[email protected]
per-file [email protected]
Loading

0 comments on commit d7a73d9

Please sign in to comment.