Metering APIs (#580)
* Add cameraPictureMetering and cameraPictureSnapshotMetering

* Adapt Meter and metering package to picture use

* Simplify Full2PictureRecorder, we'll use metering package instead

* Add doMetering parameter

* Implement cameraPictureMetering and cameraPictureSnapshotMetering in engine

* Add options in demo app

* Add better logs

* Add Snapshot2PictureRecorder

* Capture the correct frame based on timestamp

* Lock AE and AWB. Account for captureBuilder changes

* Fix runtime flash changes bug

* Small changes

* Flash support for metered snapshots

* Remove AE and AWB locks

* Lock AE/AWB/AF inside the snapshot recorder

* Small changes

* Fix AutoExposure metering

* Create Locker and locking.* parameters

* Implement Locker in Camera2Engine

* Implement reset delay in Camera2Engine instead of Meter

* Simplify Snapshot2PictureRecorder

* Fix success value

* Unlock inside Camera2Engine

* Do not lock for normal gestures

* Simplify logic

* Improve locking/AutoFocus

* Fix TORCH bug

* Small changes to locking and metering

* Remove AF and AWB for testing

* Create action package

* Create OneShotAction

* Create LogAction

* Revisit Full2VideoRecorder using actions

* Revisit Full2PictureRecorder using actions

* Enable missing functionality in Snapshot2PictureRecorder

* Move Snapshot2PictureRecorder using actions, rewrite lock package

* Add TimeoutAction

* Add comments to the action package

* Add meter package

* Remove old metering package

* Fix various bugs

* Add action.abort()

* Abort old MeterAction when running new ones

* Fix various bugs

* Add empty documentation page

* Add documentation

* Fix tests
natario1 authored Sep 4, 2019
1 parent a8fddc4 commit 4ddd2af
Showing 67 changed files with 2,965 additions and 1,375 deletions.
2 changes: 2 additions & 0 deletions README.md
@@ -136,6 +136,8 @@ Using CameraView is extremely simple:
app:cameraAutoFocusMarker="@string/cameraview_default_autofocus_marker"
app:cameraUseDeviceOrientation="true|false"
app:cameraFilter="@string/real_time_filter"
app:cameraPictureMetering="true|false"
app:cameraPictureSnapshotMetering="false|true"
app:cameraExperimental="false|true">

<!-- Watermark! -->
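The two new XML attributes above can also be toggled at runtime. A minimal sketch, assuming a placeholder layout and view id (the setters themselves are the ones exercised in the test changes further down):

```java
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import com.otaliastudios.cameraview.CameraView;

public class CameraActivity extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);      // placeholder layout hosting a CameraView
        CameraView camera = findViewById(R.id.camera); // placeholder view id

        // Full pictures: run the metering sequence before capturing.
        camera.setPictureMetering(true);
        // Picture snapshots: skip metering to keep snapshots fast.
        camera.setPictureSnapshotMetering(false);
    }
}
```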
5 changes: 4 additions & 1 deletion cameraview/build.gradle
@@ -240,9 +240,12 @@ task mergedCoverageReport(type: JacocoReport) {
classFilter.add('**/com/otaliastudios/cameraview/engine/CameraEngine**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/Camera1Engine**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/Camera2Engine**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/action/**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/lock/**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/meter/**.*')
classFilter.add('**/com/otaliastudios/cameraview/picture/**.*')
classFilter.add('**/com/otaliastudios/cameraview/video/**.*')
// TODO these below could be testable ALSO outside of the integration tests
// TODO these below could be easily testable ALSO outside of the integration tests
classFilter.add('**/com/otaliastudios/cameraview/video/encoding/**.*')
}
// We don't test OpenGL filters.
@@ -171,7 +171,10 @@ public void testDefaults() {
// Self managed
GestureParser gestures = new GestureParser(empty);
assertEquals(cameraView.getPlaySounds(), CameraView.DEFAULT_PLAY_SOUNDS);
assertEquals(cameraView.getAutoFocusResetDelay(), CameraView.DEFAULT_AUTOFOCUS_RESET_DELAY_MILLIS);
assertEquals(cameraView.getUseDeviceOrientation(), CameraView.DEFAULT_USE_DEVICE_ORIENTATION);
assertEquals(cameraView.getPictureMetering(), CameraView.DEFAULT_PICTURE_METERING);
assertEquals(cameraView.getPictureSnapshotMetering(), CameraView.DEFAULT_PICTURE_SNAPSHOT_METERING);
assertEquals(cameraView.getGestureAction(Gesture.TAP), gestures.getTapAction());
assertEquals(cameraView.getGestureAction(Gesture.LONG_TAP), gestures.getLongTapAction());
assertEquals(cameraView.getGestureAction(Gesture.PINCH), gestures.getPinchAction());
@@ -649,6 +652,22 @@ public void testSetUseDeviceOrientation() {
assertFalse(cameraView.getUseDeviceOrientation());
}

@Test
public void testSetPictureMetering() {
cameraView.setPictureMetering(true);
assertTrue(cameraView.getPictureMetering());
cameraView.setPictureMetering(false);
assertFalse(cameraView.getPictureMetering());
}

@Test
public void testSetPictureSnapshotMetering() {
cameraView.setPictureSnapshotMetering(true);
assertTrue(cameraView.getPictureSnapshotMetering());
cameraView.setPictureSnapshotMetering(false);
assertFalse(cameraView.getPictureSnapshotMetering());
}

@Test
public void testSetFlash() {
cameraView.set(Flash.TORCH);
@@ -1,7 +1,12 @@
package com.otaliastudios.cameraview.engine;

import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;

import com.otaliastudios.cameraview.DoNotRunOnTravis;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.engine.action.ActionHolder;
import com.otaliastudios.cameraview.engine.action.BaseAction;

import org.junit.Ignore;
import org.junit.Test;
@@ -11,6 +16,8 @@
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.LargeTest;

import java.util.concurrent.CountDownLatch;

/**
* These tests work great on real devices, and are the only way to test actual CameraEngine
* implementation - we really need to open the camera device.
@@ -29,7 +36,23 @@ protected Engine getEngine() {
}

@Override
public void testFrameProcessing_afterVideo() throws Exception {
super.testFrameProcessing_afterVideo();
protected void onOpenSync() {
super.onOpenSync();
// Extra wait for the first frame to be dispatched.
// This is because various classes require getLastResult to be non-null
// and that's typically the case in a real app.
Camera2Engine engine = (Camera2Engine) controller;
final CountDownLatch latch = new CountDownLatch(1);
new BaseAction() {
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
latch.countDown();
setState(STATE_COMPLETED);
}
}.start(engine);
try { latch.await(); } catch (InterruptedException ignore) {}
}
}
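The helper above shows the pattern introduced by the new action package: subclass BaseAction, observe capture results, and call setState(STATE_COMPLETED) when done. As a rough illustration of that same pattern (the class name and log tag are made up, the camera2 keys are standard platform API, and the action classes may be intended for library-internal use), an action that completes once auto-exposure has converged could look like this:

```java
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.util.Log;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.engine.action.ActionHolder;
import com.otaliastudios.cameraview.engine.action.BaseAction;

/** Completes as soon as a capture result reports a converged auto-exposure state. */
class AeConvergedAction extends BaseAction {
    @Override
    public void onCaptureCompleted(@NonNull ActionHolder holder,
                                   @NonNull CaptureRequest request,
                                   @NonNull TotalCaptureResult result) {
        super.onCaptureCompleted(holder, request, result);
        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
        if (aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
            Log.d("AeConvergedAction", "AE converged, completing."); // illustrative tag
            setState(STATE_COMPLETED);
        }
    }
}

// Starting it mirrors the test above: new AeConvergedAction().start(engine),
// where engine is the Camera2Engine acting as the ActionHolder.
```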
@@ -71,7 +71,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);

private CameraView camera;
private CameraEngine controller;
protected CameraEngine controller;
private CameraListener listener;
private Op<Throwable> uiExceptionOp;

@@ -136,25 +136,29 @@ private void waitForUiException() throws Throwable {
}
}

@SuppressWarnings("StatementWithEmptyBody")
private CameraOptions openSync(boolean expectSuccess) {
camera.open();
final Op<CameraOptions> open = new Op<>(true);
doEndOp(open, 0).when(listener).onCameraOpened(any(CameraOptions.class));
CameraOptions result = open.await(DELAY);
if (expectSuccess) {
assertNotNull("Can open", result);
// Extra wait for the bind and preview state, so we run tests in a fully operational
// state. If we didn't do so, we could have null values, for example, in getPictureSize
// or in getSnapshotSize.
while (controller.getBindState() != CameraEngine.STATE_STARTED) {}
while (controller.getPreviewState() != CameraEngine.STATE_STARTED) {}
onOpenSync();
} else {
assertNull("Should not open", result);
}
return result;
}

@SuppressWarnings("StatementWithEmptyBody")
protected void onOpenSync() {
// Extra wait for the bind and preview state, so we run tests in a fully operational
// state. If we didn't do so, we could have null values, for example, in getPictureSize
// or in getSnapshotSize.
while (controller.getBindState() != CameraEngine.STATE_STARTED) {}
while (controller.getPreviewState() != CameraEngine.STATE_STARTED) {}
}

private void closeSync(boolean expectSuccess) {
camera.close();
final Op<Boolean> close = new Op<>(true);
@@ -123,12 +123,12 @@ public void takePicture(@NonNull PictureResult.Stub stub) {
}

@Override
protected void onTakePicture(@NonNull PictureResult.Stub stub) {
protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) {

}

@Override
protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio) {
protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio, boolean doMetering) {

}

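In the real engines, the new doMetering flag carries the cameraPictureMetering / cameraPictureSnapshotMetering values down into the capture path. Purely as a hedged sketch of that intent (this class and both helper methods are invented for illustration and are not part of the commit):

```java
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.PictureResult;

/**
 * Illustrative only: what the doMetering flag is meant to drive inside an engine.
 * The helper methods below are hypothetical, not the commit's implementation.
 */
abstract class MeteringAwareEngineSketch {

    /** Same shape as the overridden signature in the mock above. */
    protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) {
        if (doMetering) {
            // cameraPictureMetering=true: converge AE/AF/AWB before capturing.
            runMeteringSequence();
        }
        // Hand the (possibly metered) capture off to the picture recorder.
        startPictureRecorder(stub);
    }

    /** Hypothetical helper: run the metering actions until they converge. */
    protected abstract void runMeteringSequence();

    /** Hypothetical helper: dispatch the platform picture recorder. */
    protected abstract void startPictureRecorder(@NonNull PictureResult.Stub stub);
}
```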
@@ -81,8 +81,9 @@ public void onOrientationChanged(int orientation) { }


/**
* Notifies that user interacted with the screen and started focus with a gesture,
* and the autofocus is trying to focus around that area. This can be used to draw things on screen.
* Notifies that user interacted with the screen and started metering with a gesture,
* and touch metering routine is trying to focus around that area.
* This callback can be used to draw things on screen.
* Can also be triggered by {@link CameraView#startAutoFocus(float, float)}.
*
* @param point coordinates with respect to CameraView.getWidth() and CameraView.getHeight()
@@ -92,20 +93,20 @@ public void onAutoFocusStart(@NonNull PointF point) { }


/**
* Notifies that a gesture focus event just ended, and the camera converged
* to a new focus (and possibly exposure and white balance).
* Notifies that a touch metering event just ended, and the camera converged
* to a new focus, exposure and possibly white balance.
* This might succeed or not.
* Can also be triggered by {@link CameraView#startAutoFocus(float, float)}.
*
* @param successful whether camera succeeded
* @param successful whether metering succeeded
* @param point coordinates with respect to CameraView.getWidth() and CameraView.getHeight()
*/
@UiThread
public void onAutoFocusEnd(boolean successful, @NonNull PointF point) { }


/**
* Noitifies that a finger gesture just caused the camera zoom
* Notifies that a finger gesture just caused the camera zoom
* to be changed. This can be used to draw, for example, a seek bar.
*
* @param newValue the new zoom value
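Since onAutoFocusStart/onAutoFocusEnd now describe the full touch metering routine, a typical consumer draws and clears a marker from these callbacks. A minimal sketch, assuming the usual addCameraListener registration (the UI work itself is left as comments):

```java
import android.graphics.PointF;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.CameraListener;
import com.otaliastudios.cameraview.CameraView;

class TouchMeteringUi {

    static void attach(@NonNull CameraView camera) {
        camera.addCameraListener(new CameraListener() {
            @Override
            public void onAutoFocusStart(@NonNull PointF point) {
                // Metering started around `point` (CameraView coordinates): show a marker here.
            }

            @Override
            public void onAutoFocusEnd(boolean successful, @NonNull PointF point) {
                // Metering converged (or failed): hide the marker, optionally show the outcome.
            }
        });

        // The same callbacks also fire for programmatic metering,
        // as long as the coordinates fall inside the view.
        camera.startAutoFocus(100f, 100f);
    }
}
```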
@@ -427,7 +427,7 @@ public boolean isZoomSupported() {


/**
* Whether auto focus (metering with respect to a specific region of the screen) is
* Whether touch metering (metering with respect to a specific region of the screen) is
* supported. If it is, you can map gestures to {@link GestureAction#AUTO_FOCUS}
* and metering will change on tap.
*
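The javadoc above ties touch metering support to GestureAction.AUTO_FOCUS. A hedged sketch of checking support before mapping the gesture — the isAutoFocusSupported getter name and the gesture package paths are assumptions based on the 2.x API, not shown in this diff:

```java
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.CameraListener;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureAction;

class TouchMeteringSetup {

    static void enableTapToMeter(@NonNull CameraView camera) {
        camera.addCameraListener(new CameraListener() {
            @Override
            public void onCameraOpened(@NonNull CameraOptions options) {
                // Getter name assumed from the javadoc above; check support before mapping.
                if (options.isAutoFocusSupported()) {
                    camera.mapGesture(Gesture.TAP, GestureAction.AUTO_FOCUS);
                }
            }
        });
    }
}
```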
