/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware;

import static android.system.OsConstants.*;

import android.annotation.Nullable;
import android.annotation.SdkConstant;
import android.annotation.SdkConstant.SdkConstantType;
import android.app.ActivityThread;
import android.app.AppOpsManager;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.media.AudioAttributes;
import android.media.IAudioService;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.os.Process;
import android.os.RemoteException;
import android.os.ServiceManager;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RSIllegalArgumentException;
import android.renderscript.RenderScript;
import android.renderscript.Type;
import android.text.TextUtils;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

import com.android.internal.annotations.GuardedBy;
import com.android.internal.app.IAppOpsCallback;
import com.android.internal.app.IAppOpsService;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

/**
 * The Camera class is used to set image capture settings, start/stop preview,
 * snap pictures, and retrieve frames for encoding for video.  This class is a
 * client for the Camera service, which manages the actual camera hardware.
 *
 * <p>To access the device camera, you must declare the
 * {@link android.Manifest.permission#CAMERA} permission in your Android
 * Manifest. Also be sure to include the
 * <a href="{@docRoot}guide/topics/manifest/uses-feature-element.html">&lt;uses-feature></a>
 * manifest element to declare camera features used by your application.
 * For example, if you use the camera and auto-focus feature, your Manifest
 * should include the following:</p>
 * <pre> &lt;uses-permission android:name="android.permission.CAMERA" />
 * &lt;uses-feature android:name="android.hardware.camera" />
 * &lt;uses-feature android:name="android.hardware.camera.autofocus" /></pre>
 *
 * <p>To take pictures with this class, use the following steps:</p>
 *
 * <ol>
 * <li>Obtain an instance of Camera from {@link #open(int)}.
 *
 * <li>Get existing (default) settings with {@link #getParameters()}.
 *
 * <li>If necessary, modify the returned {@link Camera.Parameters} object and call
 * {@link #setParameters(Camera.Parameters)}.
 *
 * <li>Call {@link #setDisplayOrientation(int)} to ensure correct orientation of preview.
 *
 * <li><b>Important</b>: Pass a fully initialized {@link SurfaceHolder} to
 * {@link #setPreviewDisplay(SurfaceHolder)}.  Without a surface, the camera
 * will be unable to start the preview.
 *
 * <li><b>Important</b>: Call {@link #startPreview()} to start updating the
 * preview surface.  Preview must be started before you can take a picture.
 *
 * <li>When you want, call {@link #takePicture(Camera.ShutterCallback,
 * Camera.PictureCallback, Camera.PictureCallback, Camera.PictureCallback)} to
 * capture a photo.  Wait for the callbacks to provide the actual image data.
 *
 * <li>After taking a picture, preview display will have stopped.  To take more
 * photos, call {@link #startPreview()} again first.
 *
 * <li>Call {@link #stopPreview()} to stop updating the preview surface.
 *
 * <li><b>Important:</b> Call {@link #release()} to release the camera for
 * use by other applications.  Applications should release the camera
 * immediately in {@link android.app.Activity#onPause()} (and re-{@link #open()}
 * it in {@link android.app.Activity#onResume()}).
 * </ol>
 *
 * <p>To quickly switch to video recording mode, use these steps:</p>
 *
 * <ol>
 * <li>Obtain and initialize a Camera and start preview as described above.
 *
 * <li>Call {@link #unlock()} to allow the media process to access the camera.
 *
 * <li>Pass the camera to {@link android.media.MediaRecorder#setCamera(Camera)}.
 * See {@link android.media.MediaRecorder} information about video recording.
 *
 * <li>When finished recording, call {@link #reconnect()} to re-acquire
 * and re-lock the camera.
 *
 * <li>If desired, restart preview and take more photos or videos.
 *
 * <li>Call {@link #stopPreview()} and {@link #release()} as described above.
 * </ol>
 *
 * <p>This class is not thread-safe, and is meant for use from one event thread.
 * Most long-running operations (preview, focus, photo capture, etc) happen
 * asynchronously and invoke callbacks as necessary.  Callbacks will be invoked
 * on the event thread {@link #open(int)} was called from.  This class's methods
 * must never be called from multiple threads at once.</p>
 *
 * <p class="caution"><strong>Caution:</strong> Different Android-powered devices
 * may have different hardware specifications, such as megapixel ratings and
 * auto-focus capabilities. In order for your application to be compatible with
 * more devices, you should not make assumptions about the device camera
 * specifications.</p>
 *
 * <div class="special reference">
 * <h3>Developer Guides</h3>
 * <p>For more information about using cameras, read the
 * <a href="{@docRoot}guide/topics/media/camera.html">Camera</a> developer guide.</p>
 * </div>
 *
 * @deprecated We recommend using the new {@link android.hardware.camera2} API for new
 *             applications.
 */
@Deprecated
public class Camera {
    private static final String TAG = "Camera";

    // These match the enums in frameworks/base/include/camera/Camera.h
    private static final int CAMERA_MSG_ERROR            = 0x001;
    private static final int CAMERA_MSG_SHUTTER          = 0x002;
    private static final int CAMERA_MSG_FOCUS            = 0x004;
    private static final int CAMERA_MSG_ZOOM             = 0x008;
    private static final int CAMERA_MSG_PREVIEW_FRAME    = 0x010;
    private static final int CAMERA_MSG_VIDEO_FRAME      = 0x020;
    private static final int CAMERA_MSG_POSTVIEW_FRAME   = 0x040;
    private static final int CAMERA_MSG_RAW_IMAGE        = 0x080;
    private static final int CAMERA_MSG_COMPRESSED_IMAGE = 0x100;
    private static final int CAMERA_MSG_RAW_IMAGE_NOTIFY = 0x200;
    private static final int CAMERA_MSG_PREVIEW_METADATA = 0x400;
    private static final int CAMERA_MSG_FOCUS_MOVE       = 0x800;

    private long mNativeContext; // accessed by native methods
    private EventHandler mEventHandler;
    private ShutterCallback mShutterCallback;
    private PictureCallback mRawImageCallback;
    private PictureCallback mJpegCallback;
    private PreviewCallback mPreviewCallback;
    private boolean mUsingPreviewAllocation;
    private PictureCallback mPostviewCallback;
    private AutoFocusCallback mAutoFocusCallback;
    private AutoFocusMoveCallback mAutoFocusMoveCallback;
    private OnZoomChangeListener mZoomListener;
    private FaceDetectionListener mFaceListener;
    private ErrorCallback mErrorCallback;
    private ErrorCallback mDetailedErrorCallback;
    private boolean mOneShot;
    private boolean mWithBuffer;
    private boolean mFaceDetectionRunning = false;
    private final Object mAutoFocusCallbackLock = new Object();
    private final Object mShutterSoundLock = new Object();
    // for AppOps
    private @Nullable IAppOpsService mAppOps;
    private IAppOpsCallback mAppOpsCallback;
    @GuardedBy("mShutterSoundLock")
    private boolean mHasAppOpsPlayAudio = true;
    @GuardedBy("mShutterSoundLock")
    private boolean mShutterSoundEnabledFromApp = true;

    private static final int NO_ERROR = 0;
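
    /*
     * Illustrative usage sketch (not part of the platform source): a minimal
     * still-capture sequence following the steps in the class documentation.
     * "surfaceHolder" is assumed to be a SurfaceHolder whose surface has already
     * been created, camera id 0 is assumed to exist, and the display-orientation
     * value of 90 is only typical for a portrait phone with a back camera.
     *
     *   Camera camera = Camera.open(0);
     *   try {
     *       camera.setDisplayOrientation(90);          // see CameraInfo.orientation
     *       camera.setPreviewDisplay(surfaceHolder);   // surface must be fully initialized
     *       camera.startPreview();                     // preview must run before takePicture()
     *       camera.takePicture(null, null, new Camera.PictureCallback() {
     *           @Override
     *           public void onPictureTaken(byte[] jpegData, Camera cam) {
     *               // persist jpegData, then restart preview for the next shot
     *               cam.startPreview();
     *           }
     *       });
     *   } catch (IOException e) {
     *       camera.release();                          // always release on failure
     *   }
     */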

    /**
     * Broadcast Action:  A new picture is taken by the camera, and the entry of
     * the picture has been added to the media store.
     * {@link android.content.Intent#getData} is URI of the picture.
     *
     * <p>In {@link android.os.Build.VERSION_CODES#N Android N} this broadcast was removed, and
     * applications are recommended to use
     * {@link android.app.job.JobInfo.Builder JobInfo.Builder}.{@link android.app.job.JobInfo.Builder#addTriggerContentUri}
     * instead.</p>
     *
     * <p>In {@link android.os.Build.VERSION_CODES#O Android O} this broadcast has been brought
     * back, but only for <em>registered</em> receivers.  Apps that are actively running can
     * again listen to the broadcast if they want an immediate clear signal about a picture
     * being taken, however anything doing heavy work (or needing to be launched) as a result of
     * this should still use JobScheduler.</p>
     */
    @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
    public static final String ACTION_NEW_PICTURE = "android.hardware.action.NEW_PICTURE";

    /**
     * Broadcast Action:  A new video is recorded by the camera, and the entry
     * of the video has been added to the media store.
     * {@link android.content.Intent#getData} is URI of the video.
     *
     * <p>In {@link android.os.Build.VERSION_CODES#N Android N} this broadcast was removed, and
     * applications are recommended to use
     * {@link android.app.job.JobInfo.Builder JobInfo.Builder}.{@link android.app.job.JobInfo.Builder#addTriggerContentUri}
     * instead.</p>
     *
     * <p>In {@link android.os.Build.VERSION_CODES#O Android O} this broadcast has been brought
     * back, but only for <em>registered</em> receivers.  Apps that are actively running can
     * again listen to the broadcast if they want an immediate clear signal about a video
     * being taken, however anything doing heavy work (or needing to be launched) as a result of
     * this should still use JobScheduler.</p>
     */
    @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
    public static final String ACTION_NEW_VIDEO = "android.hardware.action.NEW_VIDEO";

    /**
     * Camera HAL device API version 1.0
     * @hide
     */
    public static final int CAMERA_HAL_API_VERSION_1_0 = 0x100;

    /**
     * A constant meaning the normal camera connect/open will be used.
     */
    private static final int CAMERA_HAL_API_VERSION_NORMAL_CONNECT = -2;

    /**
     * Used to indicate HAL version un-specified.
     */
    private static final int CAMERA_HAL_API_VERSION_UNSPECIFIED = -1;

    /**
     * Hardware face detection. It does not use much CPU.
     */
    private static final int CAMERA_FACE_DETECTION_HW = 0;

    /**
     * Software face detection. It uses some CPU.
     */
    private static final int CAMERA_FACE_DETECTION_SW = 1;

    /**
     * Returns the number of physical cameras available on this device.
     * The return value of this method might change dynamically if the device
     * supports external cameras and an external camera is connected or
     * disconnected.
     *
     * If there is a
     * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA
     * logical multi-camera} in the system, to maintain app backward compatibility, this method will
     * only expose one camera for every logical camera and underlying physical cameras group.
     * Use camera2 API to see all cameras.
     *
     * @return total number of accessible camera devices, or 0 if there are no
     *     cameras or an error was encountered enumerating them.
     */
    public native static int getNumberOfCameras();

    /**
     * Returns the information about a particular camera.
     * If {@link #getNumberOfCameras()} returns N, the valid id is 0 to N-1.
     *
     * @throws RuntimeException if an invalid ID is provided, or if there is an
     *     error retrieving the information (generally due to a hardware or other
     *     low-level failure).
     */
    public static void getCameraInfo(int cameraId, CameraInfo cameraInfo) {
        _getCameraInfo(cameraId, cameraInfo);
        IBinder b = ServiceManager.getService(Context.AUDIO_SERVICE);
        IAudioService audioService = IAudioService.Stub.asInterface(b);
        try {
            if (audioService.isCameraSoundForced()) {
                // Only set this when sound is forced; otherwise let native code
                // decide.
                cameraInfo.canDisableShutterSound = false;
            }
        } catch (RemoteException e) {
            Log.e(TAG, "Audio service is unavailable for queries");
        }
    }

    private native static void _getCameraInfo(int cameraId, CameraInfo cameraInfo);
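
    /*
     * Illustrative sketch (not in the platform source): enumerating the available
     * cameras with getNumberOfCameras()/getCameraInfo() to find a front-facing
     * camera id, falling back to -1 if none is present.
     *
     *   int frontId = -1;
     *   Camera.CameraInfo info = new Camera.CameraInfo();
     *   for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
     *       Camera.getCameraInfo(i, info);
     *       if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
     *           frontId = i;
     *           break;
     *       }
     *   }
     */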

    /**
     * Information about a camera
     *
     * @deprecated We recommend using the new {@link android.hardware.camera2} API for new
     *             applications.
     */
    @Deprecated
    public static class CameraInfo {

        /**
         * The facing of the camera is opposite to that of the screen.
         */
        public static final int CAMERA_FACING_BACK = 0;

        /**
         * The facing of the camera is the same as that of the screen.
         */
        public static final int CAMERA_FACING_FRONT = 1;

        /**
         * The direction that the camera faces. It should be
         * CAMERA_FACING_BACK or CAMERA_FACING_FRONT.
         */
        public int facing;

        /**
         * <p>The orientation of the camera image. The value is the angle that the
         * camera image needs to be rotated clockwise so it shows correctly on
         * the display in its natural orientation. It should be 0, 90, 180, or 270.</p>
         *
         * <p>For example, suppose a device has a naturally tall screen. The
         * back-facing camera sensor is mounted in landscape. You are looking at
         * the screen. If the top side of the camera sensor is aligned with the
         * right edge of the screen in natural orientation, the value should be
         * 90. If the top side of a front-facing camera sensor is aligned with
         * the right of the screen, the value should be 270.</p>
         *
         * @see #setDisplayOrientation(int)
         * @see Parameters#setRotation(int)
         * @see Parameters#setPreviewSize(int, int)
         * @see Parameters#setPictureSize(int, int)
         * @see Parameters#setJpegThumbnailSize(int, int)
         */
        public int orientation;

        /**
         * <p>Whether the shutter sound can be disabled.</p>
         *
         * <p>On some devices, the camera shutter sound cannot be turned off
         * through {@link #enableShutterSound enableShutterSound}. This field
         * can be used to determine whether a call to disable the shutter sound
         * will succeed.</p>
         *
         * <p>If this field is set to true, then a call of
         * {@code enableShutterSound(false)} will be successful. If set to
         * false, then that call will fail, and the shutter sound will be played
         * when {@link Camera#takePicture takePicture} is called.</p>
         */
        public boolean canDisableShutterSound;
    };
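
    /*
     * Illustrative sketch (not in the platform source): computing the value to
     * pass to setDisplayOrientation(int) from CameraInfo.orientation and the
     * current display rotation, as described in the orientation documentation
     * above. "activity", "cameraId" and "camera" are assumed to be supplied by
     * the caller.
     *
     *   Camera.CameraInfo info = new Camera.CameraInfo();
     *   Camera.getCameraInfo(cameraId, info);
     *   int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
     *   int degrees = 0;
     *   switch (rotation) {
     *       case Surface.ROTATION_0:   degrees = 0;   break;
     *       case Surface.ROTATION_90:  degrees = 90;  break;
     *       case Surface.ROTATION_180: degrees = 180; break;
     *       case Surface.ROTATION_270: degrees = 270; break;
     *   }
     *   int result;
     *   if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
     *       result = (info.orientation + degrees) % 360;
     *       result = (360 - result) % 360;   // compensate for the mirrored preview
     *   } else {
     *       result = (info.orientation - degrees + 360) % 360;
     *   }
     *   camera.setDisplayOrientation(result);
     */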

    /**
     * Creates a new Camera object to access a particular hardware camera. If
     * the same camera is opened by other applications, this will throw a
     * RuntimeException.
     *
     * <p>You must call {@link #release()} when you are done using the camera,
     * otherwise it will remain locked and be unavailable to other applications.
     *
     * <p>Your application should only have one Camera object active at a time
     * for a particular hardware camera.
     *
     * <p>Callbacks from other methods are delivered to the event loop of the
     * thread which called open().  If this thread has no event loop, then
     * callbacks are delivered to the main application event loop.  If there
     * is no main application event loop, callbacks are not delivered.
     *
     * <p class="caution"><b>Caution:</b> On some devices, this method may
     * take a long time to complete.  It is best to call this method from a
     * worker thread (possibly using {@link android.os.AsyncTask}) to avoid
     * blocking the main application UI thread.
     *
     * @param cameraId the hardware camera to access, between 0 and
     *     {@link #getNumberOfCameras()}-1.
     * @return a new Camera object, connected, locked and ready for use.
     * @throws RuntimeException if opening the camera fails (for example, if the
     *     camera is in use by another process or device policy manager has
     *     disabled the camera).
     * @see android.app.admin.DevicePolicyManager#getCameraDisabled(android.content.ComponentName)
     */
    public static Camera open(int cameraId) {
        return new Camera(cameraId);
    }
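
    /*
     * Illustrative sketch (not in the platform source): opening the camera off
     * the UI thread, as recommended above, and handling the RuntimeException
     * thrown when the camera is in use or disabled. "onCameraOpened" is a
     * hypothetical hand-off method on the caller's side.
     *
     *   new Thread(new Runnable() {
     *       @Override
     *       public void run() {
     *           Camera camera = null;
     *           try {
     *               camera = Camera.open(0);
     *           } catch (RuntimeException e) {
     *               Log.w(TAG, "Camera 0 is unavailable", e);
     *           }
     *           onCameraOpened(camera);   // hypothetical callback back to the caller
     *       }
     *   }).start();
     */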

    /**
     * Creates a new Camera object to access the first back-facing camera on the
     * device. If the device does not have a back-facing camera, this returns
     * null. Otherwise acts like the {@link #open(int)} call.
     *
     * @return a new Camera object for the first back-facing camera, or null if there is no
     *     back-facing camera
     * @see #open(int)
     */
    public static Camera open() {
        int numberOfCameras = getNumberOfCameras();
        CameraInfo cameraInfo = new CameraInfo();
        for (int i = 0; i < numberOfCameras; i++) {
            getCameraInfo(i, cameraInfo);
            if (cameraInfo.facing == CameraInfo.CAMERA_FACING_BACK) {
                return new Camera(i);
            }
        }
        return null;
    }

    /**
     * Creates a new Camera object to access a particular hardware camera with
     * given HAL API version. If the same camera is opened by other applications
     * or the HAL API version is not supported by this device, this will throw a
     * RuntimeException.
     * <p>
     * You must call {@link #release()} when you are done using the camera,
     * otherwise it will remain locked and be unavailable to other applications.
     * <p>
     * Your application should only have one Camera object active at a time for
     * a particular hardware camera.
     * <p>
     * Callbacks from other methods are delivered to the event loop of the
     * thread which called open().  If this thread has no event loop, then
     * callbacks are delivered to the main application event loop.  If there is
     * no main application event loop, callbacks are not delivered.
     * <p class="caution">
     * <b>Caution:</b> On some devices, this method may take a long time to
     * complete.  It is best to call this method from a worker thread (possibly
     * using {@link android.os.AsyncTask}) to avoid blocking the main
     * application UI thread.
     *
     * @param cameraId The hardware camera to access, between 0 and
     *     {@link #getNumberOfCameras()}-1.
     * @param halVersion The HAL API version this camera device is to be opened as.
     * @return a new Camera object, connected, locked and ready for use.
     *
     * @throws IllegalArgumentException if the {@code halVersion} is invalid
     *
     * @throws RuntimeException if opening the camera fails (for example, if the
     *     camera is in use by another process or device policy manager has disabled
     *     the camera).
     *
     * @see android.app.admin.DevicePolicyManager#getCameraDisabled(android.content.ComponentName)
     * @see #CAMERA_HAL_API_VERSION_1_0
     *
     * @hide
     */
    public static Camera openLegacy(int cameraId, int halVersion) {
        if (halVersion < CAMERA_HAL_API_VERSION_1_0) {
            throw new IllegalArgumentException("Invalid HAL version " + halVersion);
        }

        return new Camera(cameraId, halVersion);
    }

    /**
     * Create a legacy camera object.
     *
     * @param cameraId The hardware camera to access, between 0 and
     *     {@link #getNumberOfCameras()}-1.
     * @param halVersion The HAL API version this camera device is to be opened as.
     */
    private Camera(int cameraId, int halVersion) {
        int err = cameraInitVersion(cameraId, halVersion);
        if (checkInitErrors(err)) {
            if (err == -EACCES) {
                throw new RuntimeException("Fail to connect to camera service");
            } else if (err == -ENODEV) {
                throw new RuntimeException("Camera initialization failed");
            } else if (err == -ENOSYS) {
                throw new RuntimeException("Camera initialization failed because some methods"
                        + " are not implemented");
            } else if (err == -EOPNOTSUPP) {
                throw new RuntimeException("Camera initialization failed because the hal"
                        + " version is not supported by this device");
            } else if (err == -EINVAL) {
                throw new RuntimeException("Camera initialization failed because the input"
                        + " arguments are invalid");
            } else if (err == -EBUSY) {
                throw new RuntimeException("Camera initialization failed because the camera"
                        + " device was already opened");
            } else if (err == -EUSERS) {
                throw new RuntimeException("Camera initialization failed because the max"
                        + " number of camera devices were already opened");
            }
            // Should never hit this.
            throw new RuntimeException("Unknown camera error");
        }
    }

    private int cameraInitVersion(int cameraId, int halVersion) {
        mShutterCallback = null;
        mRawImageCallback = null;
        mJpegCallback = null;
        mPreviewCallback = null;
        mPostviewCallback = null;
        mUsingPreviewAllocation = false;
        mZoomListener = null;

        Looper looper;
        if ((looper = Looper.myLooper()) != null) {
            mEventHandler = new EventHandler(this, looper);
        } else if ((looper = Looper.getMainLooper()) != null) {
            mEventHandler = new EventHandler(this, looper);
        } else {
            mEventHandler = null;
        }

        return native_setup(new WeakReference<Camera>(this), cameraId, halVersion,
                ActivityThread.currentOpPackageName());
    }

    private int cameraInitNormal(int cameraId) {
        return cameraInitVersion(cameraId, CAMERA_HAL_API_VERSION_NORMAL_CONNECT);
    }

    /**
     * Connect to the camera service using #connectLegacy
     *
     * <p>
     * This acts the same as normal except that it will return
     * the detailed error code if open fails instead of
     * converting everything into {@code NO_INIT}.</p>
     *
     * <p>Intended to be used by the camera2 shim only; do <i>not</i> use this for other code.</p>
     *
     * @return a detailed errno error code, or {@code NO_ERROR} on success
     *
     * @hide
     */
    public int cameraInitUnspecified(int cameraId) {
        return cameraInitVersion(cameraId, CAMERA_HAL_API_VERSION_UNSPECIFIED);
    }

    /** used by Camera#open, Camera#open(int) */
    Camera(int cameraId) {
        int err = cameraInitNormal(cameraId);
        if (checkInitErrors(err)) {
            if (err == -EACCES) {
                throw new RuntimeException("Fail to connect to camera service");
            } else if (err == -ENODEV) {
                throw new RuntimeException("Camera initialization failed");
            }
            // Should never hit this.
            throw new RuntimeException("Unknown camera error");
        }
        initAppOps();
    }

    /**
     * @hide
     */
    public static boolean checkInitErrors(int err) {
        return err != NO_ERROR;
    }

    /**
     * @hide
     */
    public static Camera openUninitialized() {
        return new Camera();
    }

    /**
     * An empty Camera for testing purposes.
     */
    Camera() {
        initAppOps();
    }

    private void initAppOps() {
        IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
        mAppOps = IAppOpsService.Stub.asInterface(b);
        // initialize mHasAppOpsPlayAudio
        updateAppOpsPlayAudio();
        // register a callback to monitor whether the OP_PLAY_AUDIO is still allowed
        mAppOpsCallback = new IAppOpsCallbackWrapper(this);
        try {
            mAppOps.startWatchingMode(AppOpsManager.OP_PLAY_AUDIO,
                    ActivityThread.currentPackageName(), mAppOpsCallback);
        } catch (RemoteException e) {
            Log.e(TAG, "Error registering appOps callback", e);
            mHasAppOpsPlayAudio = false;
        }
    }

    private void releaseAppOps() {
        try {
            if (mAppOps != null) {
                mAppOps.stopWatchingMode(mAppOpsCallback);
            }
        } catch (Exception e) {
            // nothing to do here, the object is supposed to be released anyway
        }
    }

    @Override
    protected void finalize() {
        release();
    }

    private native final int native_setup(Object camera_this, int cameraId, int halVersion,
            String packageName);

    private native final void native_release();

    /**
     * Disconnects and releases the Camera object resources.
     *
     * <p>You must call this as soon as you're done with the Camera object.</p>
     */
    public final void release() {
        native_release();
        mFaceDetectionRunning = false;
        releaseAppOps();
    }
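
    /*
     * Illustrative sketch (not in the platform source): releasing the camera in
     * onPause() and re-opening it in onResume(), as the class documentation
     * recommends. "mCamera" and "mCameraId" are assumed fields of the calling
     * activity.
     *
     *   @Override
     *   protected void onPause() {
     *       super.onPause();
     *       if (mCamera != null) {
     *           mCamera.stopPreview();
     *           mCamera.release();
     *           mCamera = null;
     *       }
     *   }
     *
     *   @Override
     *   protected void onResume() {
     *       super.onResume();
     *       if (mCamera == null) {
     *           mCamera = Camera.open(mCameraId);
     *       }
     *   }
     */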

    /**
     * Unlocks the camera to allow another process to access it.
     * Normally, the camera is locked to the process with an active Camera
     * object until {@link #release()} is called.  To allow rapid handoff
     * between processes, you can call this method to release the camera
     * temporarily for another process to use; once the other process is done
     * you can call {@link #reconnect()} to reclaim the camera.
     *
     * <p>This must be done before calling
     * {@link android.media.MediaRecorder#setCamera(Camera)}. This cannot be
     * called after recording starts.
     *
     * <p>If you are not recording video, you probably do not need this method.
     *
     * @throws RuntimeException if the camera cannot be unlocked.
     */
    public native final void unlock();

    /**
     * Re-locks the camera to prevent other processes from accessing it.
     * Camera objects are locked by default unless {@link #unlock()} is
     * called.  Normally {@link #reconnect()} is used instead.
     *
     * <p>Since API level 14, camera is automatically locked for applications in
     * {@link android.media.MediaRecorder#start()}. Applications can use the
     * camera (ex: zoom) after recording starts. There is no need to call this
     * after recording starts or stops.
     *
     * <p>If you are not recording video, you probably do not need this method.
     *
     * @throws RuntimeException if the camera cannot be re-locked (for
     *     example, if the camera is still in use by another process).
     */
    public native final void lock();

    /**
     * Reconnects to the camera service after another process used it.
     * After {@link #unlock()} is called, another process may use the
     * camera; when the process is done, you must reconnect to the camera,
     * which will re-acquire the lock and allow you to continue using the
     * camera.
     *
     * <p>Since API level 14, camera is automatically locked for applications in
     * {@link android.media.MediaRecorder#start()}. Applications can use the
     * camera (ex: zoom) after recording starts. There is no need to call this
     * after recording starts or stops.
     *
     * <p>If you are not recording video, you probably do not need this method.
     *
     * @throws IOException if a connection cannot be re-established (for
     *     example, if the camera is still in use by another process).
     * @throws RuntimeException if release() has been called on this Camera
     *     instance.
     */
    public native final void reconnect() throws IOException;
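
    /*
     * Illustrative sketch (not in the platform source): handing the camera to
     * MediaRecorder for video recording and reclaiming it afterwards with
     * unlock()/reconnect(). "camera" is assumed to be open with preview running,
     * and the recorder configuration is reduced to the camera-related calls.
     *
     *   camera.unlock();                       // let the media process use the camera
     *   MediaRecorder recorder = new MediaRecorder();
     *   recorder.setCamera(camera);
     *   // ... set sources, profile, output file, preview display ...
     *   recorder.prepare();
     *   recorder.start();
     *
     *   // later, when recording is finished:
     *   recorder.stop();
     *   recorder.release();
     *   camera.reconnect();                    // re-acquire and re-lock the camera
     */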

    /**
     * Sets the {@link Surface} to be used for live preview.
     * Either a surface or surface texture is necessary for preview, and
     * preview is necessary to take pictures.  The same surface can be re-set
     * without harm.  Setting a preview surface will un-set any preview surface
     * texture that was set via {@link #setPreviewTexture}.
     *
     * <p>The {@link SurfaceHolder} must already contain a surface when this
     * method is called.  If you are using {@link android.view.SurfaceView},
     * you will need to register a {@link SurfaceHolder.Callback} with
     * {@link SurfaceHolder#addCallback(SurfaceHolder.Callback)} and wait for
     * {@link SurfaceHolder.Callback#surfaceCreated(SurfaceHolder)} before
     * calling setPreviewDisplay() or starting preview.
     *
     * <p>This method must be called before {@link #startPreview()}.  The
     * one exception is that if the preview surface is not set (or set to null)
     * before startPreview() is called, then this method may be called once
     * with a non-null parameter to set the preview surface.  (This allows
     * camera setup and surface creation to happen in parallel, saving time.)
     * The preview surface may not otherwise change while preview is running.
     *
     * @param holder containing the Surface on which to place the preview,
     *     or null to remove the preview surface
     * @throws IOException if the method fails (for example, if the surface
     *     is unavailable or unsuitable).
     * @throws RuntimeException if release() has been called on this Camera
     *     instance.
     */
    public final void setPreviewDisplay(SurfaceHolder holder) throws IOException {
        if (holder != null) {
            setPreviewSurface(holder.getSurface());
        } else {
            setPreviewSurface((Surface)null);
        }
    }
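
    /*
     * Illustrative sketch (not in the platform source): waiting for the
     * SurfaceHolder to be ready before starting preview, as required above.
     * "surfaceView" and "mCamera" are assumed to be an existing SurfaceView and
     * an already opened Camera.
     *
     *   surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
     *       @Override
     *       public void surfaceCreated(SurfaceHolder holder) {
     *           try {
     *               mCamera.setPreviewDisplay(holder);
     *               mCamera.startPreview();
     *           } catch (IOException e) {
     *               Log.e(TAG, "Preview surface unavailable", e);
     *           }
     *       }
     *
     *       @Override
     *       public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { }
     *
     *       @Override
     *       public void surfaceDestroyed(SurfaceHolder holder) {
     *           mCamera.stopPreview();
     *       }
     *   });
     */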

    /**
     * @hide
     */
    public native final void setPreviewSurface(Surface surface) throws IOException;

    /**
     * Sets the {@link SurfaceTexture} to be used for live preview.
     * Either a surface or surface texture is necessary for preview, and
     * preview is necessary to take pictures.  The same surface texture can be
     * re-set without harm.  Setting a preview surface texture will un-set any
     * preview surface that was set via {@link #setPreviewDisplay}.
     *
     * <p>This method must be called before {@link #startPreview()}.  The
     * one exception is that if the preview surface texture is not set (or set
     * to null) before startPreview() is called, then this method may be called
     * once with a non-null parameter to set the preview surface.  (This allows
     * camera setup and surface creation to happen in parallel, saving time.)
     * The preview surface texture may not otherwise change while preview is
     * running.
     *
     * <p>The timestamps provided by {@link SurfaceTexture#getTimestamp()} for a
     * SurfaceTexture set as the preview texture have an unspecified zero point,
     * and cannot be directly compared between different cameras or different
     * instances of the same camera, or across multiple runs of the same
     * program.
     *
     * <p>If you are using the preview data to create video or still images,
     * strongly consider using {@link android.media.MediaActionSound} to
     * properly indicate image capture or recording start/stop to the user.</p>
     *
     * @see android.media.MediaActionSound
     * @see android.graphics.SurfaceTexture
     * @see android.view.TextureView
     * @param surfaceTexture the {@link SurfaceTexture} to which the preview
     *     images are to be sent or null to remove the current preview surface
     *     texture
     * @throws IOException if the method fails (for example, if the surface
     *     texture is unavailable or unsuitable).
     * @throws RuntimeException if release() has been called on this Camera
     *     instance.
     */
    public native final void setPreviewTexture(SurfaceTexture surfaceTexture) throws IOException;

    /**
     * Callback interface used to deliver copies of preview frames as
     * they are displayed.
     *
     * @see #setPreviewCallback(Camera.PreviewCallback)
     * @see #setOneShotPreviewCallback(Camera.PreviewCallback)
     * @see #setPreviewCallbackWithBuffer(Camera.PreviewCallback)
     * @see #startPreview()
     *
     * @deprecated We recommend using the new {@link android.hardware.camera2} API for new
     *             applications.
     */
    @Deprecated
    public interface PreviewCallback {
        /**
         * Called as preview frames are displayed.  This callback is invoked
         * on the event thread {@link #open(int)} was called from.
         *
         * <p>If using the {@link android.graphics.ImageFormat#YV12} format,
         * refer to the equations in {@link Camera.Parameters#setPreviewFormat}
         * for the arrangement of the pixel data in the preview callback
         * buffers.
         *
         * @param data the contents of the preview frame in the format defined
         *     by {@link android.graphics.ImageFormat}, which can be queried
         *     with {@link android.hardware.Camera.Parameters#getPreviewFormat()}.
         *     If {@link android.hardware.Camera.Parameters#setPreviewFormat(int)}
         *     is never called, the default will be the YCbCr_420_SP
         *     (NV21) format.
         * @param camera the Camera service object.
         */
        void onPreviewFrame(byte[] data, Camera camera);
    };

    /**
     * Starts capturing and drawing preview frames to the screen.
     * Preview will not actually start until a surface is supplied
     * with {@link #setPreviewDisplay(SurfaceHolder)} or
     * {@link #setPreviewTexture(SurfaceTexture)}.
     *
     * <p>If {@link #setPreviewCallback(Camera.PreviewCallback)},
     * {@link #setOneShotPreviewCallback(Camera.PreviewCallback)}, or
     * {@link #setPreviewCallbackWithBuffer(Camera.PreviewCallback)} were
     * called, {@link Camera.PreviewCallback#onPreviewFrame(byte[], Camera)}
     * will be called when preview data becomes available.
     *
     * @throws RuntimeException if starting preview fails; usually this would be
     *     because of a hardware or other low-level error, or because release()
     *     has been called on this Camera instance.
     */
    public native final void startPreview();

    /**
     * Stops capturing and drawing preview frames to the surface, and
     * resets the camera for a future call to {@link #startPreview()}.
     *
     * @throws RuntimeException if stopping preview fails; usually this would be
     *     because of a hardware or other low-level error, or because release()
     *     has been called on this Camera instance.
     */
    public final void stopPreview() {
        _stopPreview();
        mFaceDetectionRunning = false;

        mShutterCallback = null;
        mRawImageCallback = null;
        mPostviewCallback = null;
        mJpegCallback = null;
        synchronized (mAutoFocusCallbackLock) {
            mAutoFocusCallback = null;
        }
        mAutoFocusMoveCallback = null;
    }

    private native final void _stopPreview();

    /**
     * Return current preview state.
     *
     * FIXME: Unhide before release
     * @hide
     */
    public native final boolean previewEnabled();

    /**
     * <p>Installs a callback to be invoked for every preview frame in addition
     * to displaying them on the screen.  The callback will be repeatedly called
     * for as long as preview is active.  This method can be called at any time,
     * even while preview is live.  Any other preview callbacks are
     * overridden.</p>
     *
     * <p>If you are using the preview data to create video or still images,
     * strongly consider using {@link android.media.MediaActionSound} to
     * properly indicate image capture or recording start/stop to the user.</p>
     *
     * @param cb a callback object that receives a copy of each preview frame,
     *     or null to stop receiving callbacks.
     * @throws RuntimeException if release() has been called on this Camera
     *     instance.
     * @see android.media.MediaActionSound
     */
    public final void setPreviewCallback(PreviewCallback cb) {
        mPreviewCallback = cb;
        mOneShot = false;
        mWithBuffer = false;
        if (cb != null) {
            mUsingPreviewAllocation = false;
        }
        // Always use one-shot mode. We fake camera preview mode by
        // doing one-shot preview continuously.
        setHasPreviewCallback(cb != null, false);
    }
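
    /*
     * Illustrative sketch (not in the platform source): receiving a copy of every
     * preview frame. The data arrives in the current preview format (NV21 by
     * default); keep the work here short, since it runs on the event thread that
     * open(int) was called from. "camera" is assumed to be an open Camera.
     *
     *   camera.setPreviewCallback(new Camera.PreviewCallback() {
     *       @Override
     *       public void onPreviewFrame(byte[] data, Camera cam) {
     *           // e.g. hand "data" to an analysis queue; do not block here
     *       }
     *   });
     */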


    /**
     * <p>Installs a callback to be invoked for the next preview frame in
     * addition to displaying it on the screen.  After one invocation, the
     * callback is cleared. This method can be called any time, even when
     * preview is live.  Any other preview callbacks are overridden.</p>
     *
     * <p>If you are using the preview data to create video or still images,
     * strongly consider using {@link android.media.MediaActionSound} to
     * properly indicate image capture or recording start/stop to the user.</p>
     *
     * @param cb a callback object that receives a copy of the next preview frame,
     *     or null to stop receiving callbacks.
     * @throws RuntimeException if release() has been called on this Camera
     *     instance.
     * @see android.media.MediaActionSound
     */
    public final void setOneShotPreviewCallback(PreviewCallback cb) {
        mPreviewCallback = cb;
        mOneShot = true;
        mWithBuffer = false;
        if (cb != null) {
            mUsingPreviewAllocation = false;
        }
        setHasPreviewCallback(cb != null, false);
    }

    private native final void setHasPreviewCallback(boolean installed, boolean manualBuffer);


    /**
     * <p>Installs a callback to be invoked for every preview frame, using
     * buffers supplied with {@link #addCallbackBuffer(byte[])}, in addition to
     * displaying them on the screen.  The callback will be repeatedly called
     * for as long as preview is active and buffers are available.  Any other
     * preview callbacks are overridden.</p>
     *
     * <p>The purpose of this method is to improve preview efficiency and frame
     * rate by allowing preview frame memory reuse.  You must call
     * {@link #addCallbackBuffer(byte[])} at some point -- before or after
     * calling this method -- or no callbacks will be received.</p>
     *
     * <p>The buffer queue will be cleared if this method is called with a null
     * callback, {@link #setPreviewCallback(Camera.PreviewCallback)} is called,
     * or {@link #setOneShotPreviewCallback(Camera.PreviewCallback)} is
     * called.</p>
     *
     * <p>If you are using the preview data to create video or still images,
     * strongly consider using {@link android.media.MediaActionSound} to
     * properly indicate image capture or recording start/stop to the user.</p>
     *
     * @param cb a callback object that receives a copy of the preview frame,
     *     or null to stop receiving callbacks and clear the buffer queue.
     * @throws RuntimeException if release() has been called on this Camera
     *     instance.
     * @see #addCallbackBuffer(byte[])
     * @see android.media.MediaActionSound
     */
    public final void setPreviewCallbackWithBuffer(PreviewCallback cb) {
        mPreviewCallback = cb;
        mOneShot = false;
        mWithBuffer = true;
        if (cb != null) {
            mUsingPreviewAllocation = false;
        }
        setHasPreviewCallback(cb != null, true);
    }
/** * Adds a pre-allocated buffer to the preview callback buffer queue. * Applications can add one or more buffers to the queue. When a preview * frame arrives and there is still at least one available buffer, the * buffer will be used and removed from the queue. Then preview callback is * invoked with the buffer. If a frame arrives and there is no buffer left, * the frame is discarded. Applications should add buffers back when they * finish processing the data in them. * * <p>For formats besides YV12, the size of the buffer is determined by * multiplying the preview image width, height, and bytes per pixel. The * width and height can be read from * {@link Camera.Parameters#getPreviewSize()}. Bytes per pixel can be * computed from {@link android.graphics.ImageFormat#getBitsPerPixel(int)} / * 8, using the image format from * {@link Camera.Parameters#getPreviewFormat()}. * * <p>If using the {@link android.graphics.ImageFormat#YV12} format, the * size can be calculated using the equations listed in * {@link Camera.Parameters#setPreviewFormat}. * * <p>This method is only necessary when * {@link #setPreviewCallbackWithBuffer(PreviewCallback)} is used. When * {@link #setPreviewCallback(PreviewCallback)} or * {@link #setOneShotPreviewCallback(PreviewCallback)} are used, buffers * are automatically allocated. When a supplied buffer is too small to * hold the preview frame data, preview callback will return null and * the buffer will be removed from the buffer queue. * * @param callbackBuffer the buffer to add to the queue. The size of the * buffer must match the values described above. * @see #setPreviewCallbackWithBuffer(PreviewCallback) */
public final void addCallbackBuffer(byte[] callbackBuffer) { _addCallbackBuffer(callbackBuffer, CAMERA_MSG_PREVIEW_FRAME); }
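For the ImageFormat.YV12 case mentioned above, the buffer is larger than width * height * bitsPerPixel / 8 because of stride alignment. A small helper along the lines of the layout documented for Parameters.setPreviewFormat (a sketch; verify the formula against the platform documentation for your target API level) would be:

    // Sketch: byte count for a YV12 preview buffer of the given dimensions,
    // using the 16-byte stride alignment described for ImageFormat.YV12.
    static int yv12BufferSize(int width, int height) {
        int yStride  = (int) Math.ceil(width / 16.0) * 16;
        int uvStride = (int) Math.ceil((yStride / 2) / 16.0) * 16;
        int ySize  = yStride * height;
        int uvSize = uvStride * height / 2;
        return ySize + uvSize * 2;
    }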
/** * Adds a pre-allocated buffer to the raw image callback buffer queue. * Applications can add one or more buffers to the queue. When a raw image * frame arrives and there is still at least one available buffer, the * buffer will be used to hold the raw image data and removed from the * queue. Then raw image callback is invoked with the buffer. If a raw * image frame arrives but there is no buffer left, the frame is * discarded. Applications should add buffers back when they finish * processing the data in them by calling this method again in order * to avoid running out of raw image callback buffers. * * <p>The size of the buffer is determined by multiplying the raw image * width, height, and bytes per pixel. The width and height can be * read from {@link Camera.Parameters#getPictureSize()}. Bytes per pixel * can be computed from * {@link android.graphics.ImageFormat#getBitsPerPixel(int)} / 8, * using the image format from {@link Camera.Parameters#getPreviewFormat()}. * * <p>This method is only necessary when the PictureCallback for raw image * is used while calling {@link #takePicture(Camera.ShutterCallback, * Camera.PictureCallback, Camera.PictureCallback, Camera.PictureCallback)}. * * <p>Please note that by calling this method, the mode for * application-managed callback buffers is triggered. If this method has * never been called, null will be returned by the raw image callback since * there is no image callback buffer available. Furthermore, when a supplied * buffer is too small to hold the raw image data, raw image callback will * return null and the buffer will be removed from the buffer queue. * * @param callbackBuffer the buffer to add to the raw image callback buffer * queue. The size should be width * height * (bits per pixel) / 8. A * null callbackBuffer will be ignored and won't be added to the queue. * * @see #takePicture(Camera.ShutterCallback, * Camera.PictureCallback, Camera.PictureCallback, Camera.PictureCallback). * * {@hide} */
public final void addRawImageCallbackBuffer(byte[] callbackBuffer) { addCallbackBuffer(callbackBuffer, CAMERA_MSG_RAW_IMAGE); } private final void addCallbackBuffer(byte[] callbackBuffer, int msgType) { // CAMERA_MSG_VIDEO_FRAME may be allowed in the future. if (msgType != CAMERA_MSG_PREVIEW_FRAME && msgType != CAMERA_MSG_RAW_IMAGE) { throw new IllegalArgumentException( "Unsupported message type: " + msgType); } _addCallbackBuffer(callbackBuffer, msgType); } private native final void _addCallbackBuffer( byte[] callbackBuffer, int msgType);

/** * <p>Create a {@link android.renderscript RenderScript} * {@link android.renderscript.Allocation Allocation} to use as a * destination of preview callback frames. Use * {@link #setPreviewCallbackAllocation setPreviewCallbackAllocation} to use * the created Allocation as a destination for camera preview frames.</p> * * <p>The Allocation will be created with a YUV type, and its contents must * be accessed within RenderScript with the {@code rsGetElementAtYuv_*} * accessor methods. Its size will be based on the current * {@link Parameters#getPreviewSize preview size} configured for this * camera.</p> * * @param rs the RenderScript context for this Allocation. * @param usage additional usage flags to set for the Allocation. The usage * flag {@link android.renderscript.Allocation#USAGE_IO_INPUT} will always * be set on the created Allocation, but additional flags may be provided * here. * @return a new YUV-type Allocation with dimensions equal to the current * preview size. * @throws RSIllegalArgumentException if the usage flags are not compatible * with a YUV Allocation. * @see #setPreviewCallbackAllocation * @hide */
public final Allocation createPreviewAllocation(RenderScript rs, int usage) throws RSIllegalArgumentException { Parameters p = getParameters(); Size previewSize = p.getPreviewSize(); Type.Builder yuvBuilder = new Type.Builder(rs, Element.createPixel(rs, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV)); // Use YV12 for wide compatibility. Changing this requires also // adjusting camera service's format selection. yuvBuilder.setYuvFormat(ImageFormat.YV12); yuvBuilder.setX(previewSize.width); yuvBuilder.setY(previewSize.height); Allocation a = Allocation.createTyped(rs, yuvBuilder.create(), usage | Allocation.USAGE_IO_INPUT); return a; }

/** * <p>Set an {@link android.renderscript.Allocation Allocation} as the * target of preview callback data. Use this method for efficient processing * of camera preview data with RenderScript. The Allocation must be created * with the {@link #createPreviewAllocation createPreviewAllocation } * method.</p> * * <p>Setting a preview allocation will disable any active preview callbacks * set by {@link #setPreviewCallback setPreviewCallback} or * {@link #setPreviewCallbackWithBuffer setPreviewCallbackWithBuffer}, and * vice versa. Using a preview allocation still requires an active standard * preview target to be set, either with * {@link #setPreviewTexture setPreviewTexture} or * {@link #setPreviewDisplay setPreviewDisplay}.</p> * * <p>To be notified when new frames are available to the Allocation, use * {@link android.renderscript.Allocation#setIoInputNotificationHandler Allocation.setIoInputNotificationHandler}. To * update the frame currently accessible from the Allocation to the latest * preview frame, call * {@link android.renderscript.Allocation#ioReceive Allocation.ioReceive}.</p> * * <p>To disable preview into the Allocation, call this method with a * {@code null} parameter.</p> * * <p>Once a preview allocation is set, the preview size set by * {@link Parameters#setPreviewSize setPreviewSize} cannot be changed. If * you wish to change the preview size, first remove the preview allocation * by calling {@code setPreviewCallbackAllocation(null)}, then change the * preview size, create a new preview Allocation with * {@link #createPreviewAllocation createPreviewAllocation}, and set it as * the new preview callback allocation target.</p> * * <p>If you are using the preview data to create video or still images, * strongly consider using {@link android.media.MediaActionSound} to * properly indicate image capture or recording start/stop to the user.</p> * * @param previewAllocation the allocation to use as destination for preview * @throws IOException if configuring the camera to use the Allocation for * preview fails. * @throws IllegalArgumentException if the Allocation's dimensions or other * parameters don't meet the requirements. * @see #createPreviewAllocation * @see #setPreviewCallback * @see #setPreviewCallbackWithBuffer * @hide */
public final void setPreviewCallbackAllocation(Allocation previewAllocation) throws IOException { Surface previewSurface = null; if (previewAllocation != null) { Parameters p = getParameters(); Size previewSize = p.getPreviewSize(); if (previewSize.width != previewAllocation.getType().getX() || previewSize.height != previewAllocation.getType().getY()) { throw new IllegalArgumentException( "Allocation dimensions don't match preview dimensions: " + "Allocation is " + previewAllocation.getType().getX() + ", " + previewAllocation.getType().getY() + ". Preview is " + previewSize.width + ", " + previewSize.height); } if ((previewAllocation.getUsage() & Allocation.USAGE_IO_INPUT) == 0) { throw new IllegalArgumentException( "Allocation usage does not include USAGE_IO_INPUT"); } if (previewAllocation.getType().getElement().getDataKind() != Element.DataKind.PIXEL_YUV) { throw new IllegalArgumentException( "Allocation is not of a YUV type"); } previewSurface = previewAllocation.getSurface(); mUsingPreviewAllocation = true; } else { mUsingPreviewAllocation = false; } setPreviewCallbackSurface(previewSurface); } private native final void setPreviewCallbackSurface(Surface s); private class EventHandler extends Handler { private final Camera mCamera; public EventHandler(Camera c, Looper looper) { super(looper); mCamera = c; } @Override public void handleMessage(Message msg) { switch(msg.what) { case CAMERA_MSG_SHUTTER: if (mShutterCallback != null) { mShutterCallback.onShutter(); } return; case CAMERA_MSG_RAW_IMAGE: if (mRawImageCallback != null) { mRawImageCallback.onPictureTaken((byte[])msg.obj, mCamera); } return; case CAMERA_MSG_COMPRESSED_IMAGE: if (mJpegCallback != null) { mJpegCallback.onPictureTaken((byte[])msg.obj, mCamera); } return; case CAMERA_MSG_PREVIEW_FRAME: PreviewCallback pCb = mPreviewCallback; if (pCb != null) { if (mOneShot) { // Clear the callback variable before the callback // in case the app calls setPreviewCallback from // the callback function mPreviewCallback = null; } else if (!mWithBuffer) { // We're faking the camera preview mode to prevent // the app from being flooded with preview frames. // Set to oneshot mode again. setHasPreviewCallback(true, false); } pCb.onPreviewFrame((byte[])msg.obj, mCamera); } return; case CAMERA_MSG_POSTVIEW_FRAME: if (mPostviewCallback != null) { mPostviewCallback.onPictureTaken((byte[])msg.obj, mCamera); } return; case CAMERA_MSG_FOCUS: AutoFocusCallback cb = null; synchronized (mAutoFocusCallbackLock) { cb = mAutoFocusCallback; } if (cb != null) { boolean success = msg.arg1 == 0 ? false : true; cb.onAutoFocus(success, mCamera); } return; case CAMERA_MSG_ZOOM: if (mZoomListener != null) { mZoomListener.onZoomChange(msg.arg1, msg.arg2 != 0, mCamera); } return; case CAMERA_MSG_PREVIEW_METADATA: if (mFaceListener != null) { mFaceListener.onFaceDetection((Face[])msg.obj, mCamera); } return; case CAMERA_MSG_ERROR : Log.e(TAG, "Error " + msg.arg1); if (mDetailedErrorCallback != null) { mDetailedErrorCallback.onError(msg.arg1, mCamera); } else if (mErrorCallback != null) { if (msg.arg1 == CAMERA_ERROR_DISABLED) { mErrorCallback.onError(CAMERA_ERROR_EVICTED, mCamera); } else { mErrorCallback.onError(msg.arg1, mCamera); } } return; case CAMERA_MSG_FOCUS_MOVE: if (mAutoFocusMoveCallback != null) { mAutoFocusMoveCallback.onAutoFocusMoving(msg.arg1 == 0 ? 
false : true, mCamera); } return; default: Log.e(TAG, "Unknown message type " + msg.what); return; } } } private static void postEventFromNative(Object camera_ref, int what, int arg1, int arg2, Object obj) { Camera c = (Camera)((WeakReference)camera_ref).get(); if (c == null) return; if (c.mEventHandler != null) { Message m = c.mEventHandler.obtainMessage(what, arg1, arg2, obj); c.mEventHandler.sendMessage(m); } }
/** * Callback interface used to notify on completion of camera auto focus. * * <p>Devices that do not support auto-focus will receive a "fake" * callback to this interface. If your application needs auto-focus and * should not be installed on devices <em>without</em> auto-focus, you must * declare that your app uses the * {@code android.hardware.camera.autofocus} feature, in the * <a href="{@docRoot}guide/topics/manifest/uses-feature-element.html">&lt;uses-feature></a> * manifest element.</p> * * @see #autoFocus(AutoFocusCallback) * @deprecated We recommend using the new {@link android.hardware.camera2} API for new * applications. */
@Deprecated public interface AutoFocusCallback {
/** * Called when the camera auto focus completes. If the camera * does not support auto-focus and autoFocus is called, * onAutoFocus will be called immediately with a fake value of * <code>success</code> set to <code>true</code>. * * The auto-focus routine does not lock auto-exposure and auto-white * balance after it completes. * * @param success true if focus was successful, false if otherwise * @param camera the Camera service object * @see android.hardware.Camera.Parameters#setAutoExposureLock(boolean) * @see android.hardware.Camera.Parameters#setAutoWhiteBalanceLock(boolean) */
void onAutoFocus(boolean success, Camera camera); }
/** * Starts camera auto-focus and registers a callback function to run when * the camera is focused. This method is only valid when preview is active * (between {@link #startPreview()} and before {@link #stopPreview()}). * * <p>Callers should check * {@link android.hardware.Camera.Parameters#getFocusMode()} to determine if * this method should be called. If the camera does not support auto-focus, * it is a no-op and {@link AutoFocusCallback#onAutoFocus(boolean, Camera)} * callback will be called immediately. * * <p>If your application should not be installed * on devices without auto-focus, you must declare that your application * uses auto-focus with the * <a href="{@docRoot}guide/topics/manifest/uses-feature-element.html">&lt;uses-feature></a> * manifest element.</p> * * <p>If the current flash mode is not * {@link android.hardware.Camera.Parameters#FLASH_MODE_OFF}, flash may be * fired during auto-focus, depending on the driver and camera hardware.<p> * * <p>Auto-exposure lock {@link android.hardware.Camera.Parameters#getAutoExposureLock()} * and auto-white balance locks {@link android.hardware.Camera.Parameters#getAutoWhiteBalanceLock()} * do not change during and after autofocus. But auto-focus routine may stop * auto-exposure and auto-white balance transiently during focusing. * * <p>Stopping preview with {@link #stopPreview()}, or triggering still * image capture with {@link #takePicture(Camera.ShutterCallback, * Camera.PictureCallback, Camera.PictureCallback)}, will not change the * the focus position. Applications must call cancelAutoFocus to reset the * focus.</p> * * <p>If autofocus is successful, consider using * {@link android.media.MediaActionSound} to properly play back an autofocus * success sound to the user.</p> * * @param cb the callback to run * @throws RuntimeException if starting autofocus fails; usually this would * be because of a hardware or other low-level error, or because * release() has been called on this Camera instance. * @see #cancelAutoFocus() * @see android.hardware.Camera.Parameters#setAutoExposureLock(boolean) * @see android.hardware.Camera.Parameters#setAutoWhiteBalanceLock(boolean) * @see android.media.MediaActionSound */
public final void autoFocus(AutoFocusCallback cb) { synchronized (mAutoFocusCallbackLock) { mAutoFocusCallback = cb; } native_autoFocus(); } private native final void native_autoFocus();
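A minimal sketch of a single focus run, assuming the current focus mode actually supports auto-focus and using MediaActionSound for the success sound suggested above (mCamera and the sound object's lifetime are illustrative):

    // Sketch: trigger one auto-focus run and play a sound when it succeeds.
    final MediaActionSound sound = new MediaActionSound();
    String focusMode = mCamera.getParameters().getFocusMode();
    if (Camera.Parameters.FOCUS_MODE_AUTO.equals(focusMode)
            || Camera.Parameters.FOCUS_MODE_MACRO.equals(focusMode)) {
        mCamera.autoFocus(new Camera.AutoFocusCallback() {
            @Override
            public void onAutoFocus(boolean success, Camera camera) {
                if (success) {
                    sound.play(MediaActionSound.FOCUS_COMPLETE);
                }
            }
        });
    }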
/** * Cancels any auto-focus function in progress. * Whether or not auto-focus is currently in progress, * this function will return the focus position to the default. * If the camera does not support auto-focus, this is a no-op. * * @throws RuntimeException if canceling autofocus fails; usually this would * be because of a hardware or other low-level error, or because * release() has been called on this Camera instance. * @see #autoFocus(Camera.AutoFocusCallback) */
public final void cancelAutoFocus() { synchronized (mAutoFocusCallbackLock) { mAutoFocusCallback = null; } native_cancelAutoFocus(); // CAMERA_MSG_FOCUS should be removed here because the following // scenario can happen: // - An application uses the same thread for autoFocus, cancelAutoFocus // and looper thread. // - The application calls autoFocus. // - HAL sends CAMERA_MSG_FOCUS, which enters the looper message queue. // Before event handler's handleMessage() is invoked, the application // calls cancelAutoFocus and autoFocus. // - The application gets the old CAMERA_MSG_FOCUS and thinks autofocus // has been completed. But in fact it is not. // // As documented in the beginning of the file, apps should not use // multiple threads to call autoFocus and cancelAutoFocus at the same // time. It is HAL's responsibility not to send a CAMERA_MSG_FOCUS // message after native_cancelAutoFocus is called. mEventHandler.removeMessages(CAMERA_MSG_FOCUS); } private native final void native_cancelAutoFocus();
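For example, a tap-to-focus UI typically discards any focus run still in flight before starting a new one (a sketch; mCamera and mAutoFocusCallback are illustrative fields):

    // Sketch: restart auto-focus on a new tap, cancelling any run in progress.
    mCamera.cancelAutoFocus();
    mCamera.autoFocus(mAutoFocusCallback);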
/** * Callback interface used to notify on auto focus start and stop. * * <p>This is only supported in continuous autofocus modes -- {@link * Parameters#FOCUS_MODE_CONTINUOUS_VIDEO} and {@link * Parameters#FOCUS_MODE_CONTINUOUS_PICTURE}. Applications can show * autofocus animation based on this.</p> * * @deprecated We recommend using the new {@link android.hardware.camera2} API for new * applications. */
@Deprecated public interface AutoFocusMoveCallback {
/** * Called when the camera auto focus starts or stops. * * @param start true if focus starts to move, false if focus stops moving * @param camera the Camera service object */
void onAutoFocusMoving(boolean start, Camera camera); }
/** * Sets camera auto-focus move callback. * * @param cb the callback to run * @throws RuntimeException if enabling the focus move callback fails; * usually this would be because of a hardware or other low-level error, * or because release() has been called on this Camera instance. */
public void setAutoFocusMoveCallback(AutoFocusMoveCallback cb) { mAutoFocusMoveCallback = cb; enableFocusMoveCallback((mAutoFocusMoveCallback != null) ? 1 : 0); } private native void enableFocusMoveCallback(int enable);
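In a continuous-autofocus mode this callback is typically used to drive a focusing indicator, roughly as follows (showFocusIndicator() is a hypothetical UI hook, not part of this API):

    // Sketch: surface continuous-autofocus activity to the user.
    Camera.Parameters params = mCamera.getParameters();
    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
    mCamera.setParameters(params);
    mCamera.setAutoFocusMoveCallback(new Camera.AutoFocusMoveCallback() {
        @Override
        public void onAutoFocusMoving(boolean start, Camera camera) {
            showFocusIndicator(start); // show while scanning, hide when settled
        }
    });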
/** * Callback interface used to signal the moment of actual image capture. * * @see #takePicture(ShutterCallback, PictureCallback, PictureCallback, PictureCallback) * * @deprecated We recommend using the new {@link android.hardware.camera2} API for new * applications. */
@Deprecated public interface ShutterCallback {
/** * Called as near as possible to the moment when a photo is captured * from the sensor. This is a good opportunity to play a shutter sound * or give other feedback of camera operation. This may be some time * after the photo was triggered, but some time before the actual data * is available. */
void onShutter(); }
/** * Callback interface used to supply image data from a photo capture. * * @see #takePicture(ShutterCallback, PictureCallback, PictureCallback, PictureCallback) * * @deprecated We recommend using the new {@link android.hardware.camera2} API for new * applications. */
@Deprecated public interface PictureCallback {
/** * Called when image data is available after a picture is taken. * The format of the data depends on the context of the callback * and {@link Camera.Parameters} settings. * * @param data a byte array of the picture data * @param camera the Camera service object */
void onPictureTaken(byte[] data, Camera camera); };
/** * Equivalent to <pre>takePicture(shutter, raw, null, jpeg)</pre>. * * @see #takePicture(ShutterCallback, PictureCallback, PictureCallback, PictureCallback) */
public final void takePicture(ShutterCallback shutter, PictureCallback raw, PictureCallback jpeg) { takePicture(shutter, raw, null, jpeg); } private native final void native_takePicture(int msgType);
/** * Triggers an asynchronous image capture. The camera service will initiate * a series of callbacks to the application as the image capture progresses. * The shutter callback occurs after the image is captured. This can be used * to trigger a sound to let the user know that image has been captured. The * raw callback occurs when the raw image data is available (NOTE: the data * will be null if there is no raw image callback buffer available or the * raw image callback buffer is not large enough to hold the raw image). * The postview callback occurs when a scaled, fully processed postview * image is available (NOTE: not all hardware supports this). The jpeg * callback occurs when the compressed image is available. If the * application does not need a particular callback, a null can be passed * instead of a callback method. * * <p>This method is only valid when preview is active (after * {@link #startPreview()}). Preview will be stopped after the image is * taken; callers must call {@link #startPreview()} again if they want to * re-start preview or take more pictures. This should not be called between * {@link android.media.MediaRecorder#start()} and * {@link android.media.MediaRecorder#stop()}. * * <p>After calling this method, you must not call {@link #startPreview()} * or take another picture until the JPEG callback has returned. * * @param shutter the callback for image capture moment, or null * @param raw the callback for raw (uncompressed) image data, or null * @param postview callback with postview image data, may be null * @param jpeg the callback for JPEG image data, or null * @throws RuntimeException if starting picture capture fails; usually this * would be because of a hardware or other low-level error, or because * release() has been called on this Camera instance. */
public final void takePicture(ShutterCallback shutter, PictureCallback raw, PictureCallback postview, PictureCallback jpeg) { mShutterCallback = shutter; mRawImageCallback = raw; mPostviewCallback = postview; mJpegCallback = jpeg; // If callback is not set, do not send me callbacks. int msgType = 0; if (mShutterCallback != null) { msgType |= CAMERA_MSG_SHUTTER; } if (mRawImageCallback != null) { msgType |= CAMERA_MSG_RAW_IMAGE; } if (mPostviewCallback != null) { msgType |= CAMERA_MSG_POSTVIEW_FRAME; } if (mJpegCallback != null) { msgType |= CAMERA_MSG_COMPRESSED_IMAGE; } native_takePicture(msgType); mFaceDetectionRunning = false; }
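A typical capture sequence, sketched below, passes null for the callbacks it does not need, writes the JPEG to a file, and restarts preview for the next shot (pictureFile is an illustrative java.io.File; error handling is simplified):

    // Sketch: JPEG-only capture followed by a preview restart.
    mCamera.takePicture(null /* shutter */, null /* raw */, new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            try (java.io.FileOutputStream out = new java.io.FileOutputStream(pictureFile)) {
                out.write(data);
            } catch (IOException e) {
                Log.e(TAG, "Failed to write JPEG", e);
            }
            camera.startPreview(); // preview stops after capture; restart it here
        }
    });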
/** * Zooms to the requested value smoothly. The driver will notify {@link * OnZoomChangeListener} of the zoom value and whether zoom is stopped at * the time. For example, suppose the current zoom is 0 and startSmoothZoom * is called with value 3. The * {@link Camera.OnZoomChangeListener#onZoomChange(int, boolean, Camera)} * method will be called three times with zoom values 1, 2, and 3. * Applications can call {@link #stopSmoothZoom} to stop the zoom earlier. * Applications should not call startSmoothZoom again or change the zoom * value before zoom stops. If the supplied zoom value equals the current * zoom value, no zoom callback will be generated. This method is supported * if {@link android.hardware.Camera.Parameters#isSmoothZoomSupported} * returns true. * * @param value zoom value. The valid range is 0 to {@link * android.hardware.Camera.Parameters#getMaxZoom}. * @throws IllegalArgumentException if the zoom value is invalid. * @throws RuntimeException if the method fails. * @see #setZoomChangeListener(OnZoomChangeListener) */
public native final void startSmoothZoom(int value);
/** * Stops the smooth zoom. Applications should wait for the {@link * OnZoomChangeListener} to know when the zoom is actually stopped. This * method is supported if {@link * android.hardware.Camera.Parameters#isSmoothZoomSupported} is true. * * @throws RuntimeException if the method fails. */
public native final void stopSmoothZoom();
/** * Set the clockwise rotation of preview display in degrees. This affects * the preview frames and the picture displayed after snapshot. This method * is useful for portrait mode applications. Note that preview display of * front-facing cameras is flipped horizontally before the rotation, that * is, the image is reflected along the central vertical axis of the camera * sensor. So the users can see themselves as looking into a mirror. * * <p>This does not affect the order of byte array passed in {@link * PreviewCallback#onPreviewFrame}, JPEG pictures, or recorded videos. This * method is not allowed to be called during preview. * * <p>If you want to make the camera image show in the same orientation as * the display, you can use the following code. * <pre> * public static void setCameraDisplayOrientation(Activity activity, * int cameraId, android.hardware.Camera camera) { * android.hardware.Camera.CameraInfo info = * new android.hardware.Camera.CameraInfo(); * android.hardware.Camera.getCameraInfo(cameraId, info); * int rotation = activity.getWindowManager().getDefaultDisplay() * .getRotation(); * int degrees = 0; * switch (rotation) { * case Surface.ROTATION_0: degrees = 0; break; * case Surface.ROTATION_90: degrees = 90; break; * case Surface.ROTATION_180: degrees = 180; break; * case Surface.ROTATION_270: degrees = 270; break; * } * * int result; * if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { * result = (info.orientation + degrees) % 360; * result = (360 - result) % 360; // compensate the mirror * } else { // back-facing * result = (info.orientation - degrees + 360) % 360; * } * camera.setDisplayOrientation(result); * } * </pre> * * <p>Starting from API level 14, this method can be called when preview is * active. * * <p><b>Note: </b>Before API level 24, the default value for orientation is 0. Starting in * API level 24, the default orientation will be such that applications in forced-landscape mode * will have correct preview orientation, which may be either a default of 0 or * 180. Applications that operate in portrait mode or allow for changing orientation must still * call this method after each orientation change to ensure correct preview display in all * cases.</p> * * @param degrees the angle that the picture will be rotated clockwise. * Valid values are 0, 90, 180, and 270. * @throws RuntimeException if setting orientation fails; usually this would * be because of a hardware or other low-level error, or because * release() has been called on this Camera instance. * @see #setPreviewDisplay(SurfaceHolder) */
public native final void setDisplayOrientation(int degrees);

/** * <p>Enable or disable the default shutter sound when taking a picture.</p> * * <p>By default, the camera plays the system-defined camera shutter sound * when {@link #takePicture} is called. Using this method, the shutter sound * can be disabled. It is strongly recommended that an alternative shutter * sound is played in the {@link ShutterCallback} when the system shutter * sound is disabled.</p> * * <p>Note that devices may not always allow disabling the camera shutter * sound. If the shutter sound state cannot be set to the desired value, * this method will return false. {@link CameraInfo#canDisableShutterSound} * can be used to determine whether the device will allow the shutter sound * to be disabled.</p> * * @param enabled whether the camera should play the system shutter sound * when {@link #takePicture takePicture} is called. * @return {@code true} if the shutter sound state was successfully * changed. {@code false} if the shutter sound state could not be * changed. {@code true} is also returned if shutter sound playback * is already set to the requested state. * @throws RuntimeException if the call fails; usually this would be because * of a hardware or other low-level error, or because release() has been * called on this Camera instance. * @see #takePicture * @see CameraInfo#canDisableShutterSound * @see ShutterCallback */
public final boolean enableShutterSound(boolean enabled) { boolean canDisableShutterSound = true; IBinder b = ServiceManager.getService(Context.AUDIO_SERVICE); IAudioService audioService = IAudioService.Stub.asInterface(b); try { if (audioService.isCameraSoundForced()) { canDisableShutterSound = false; } } catch (RemoteException e) { Log.e(TAG, "Audio service is unavailable for queries"); } if (!enabled && !canDisableShutterSound) { return false; } synchronized (mShutterSoundLock) { mShutterSoundEnabledFromApp = enabled; // Return the result of _enableShutterSound(enabled) in all cases. // If the shutter sound can be disabled, disable it when the device is in DnD mode. boolean ret = _enableShutterSound(enabled); if (enabled && !mHasAppOpsPlayAudio) { Log.i(TAG, "Shutter sound is not allowed by AppOpsManager"); if (canDisableShutterSound) { _enableShutterSound(false); } } return ret; } }
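A sketch of muting the system sound only where the device permits it, and playing an app-supplied sound from the ShutterCallback instead (cameraId and mCamera are illustrative):

    // Sketch: substitute MediaActionSound for the system shutter sound when allowed.
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, info);
    final MediaActionSound sound = new MediaActionSound();
    final boolean muted = info.canDisableShutterSound && mCamera.enableShutterSound(false);

    Camera.ShutterCallback shutterCallback = new Camera.ShutterCallback() {
        @Override
        public void onShutter() {
            if (muted) {
                sound.play(MediaActionSound.SHUTTER_CLICK);
            }
        }
    };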
/** * Disable the shutter sound unconditionally. * * <p> * This is only guaranteed to work for legacy cameras * (i.e. initialized with {@link #cameraInitUnspecified}). Trying to call this on * a regular camera will force a conditional check in the camera service. * </p> * * @return {@code true} if the shutter sound state was successfully * changed. {@code false} if the shutter sound state could not be * changed. {@code true} is also returned if shutter sound playback * is already set to the requested state. * * @hide */
public final boolean disableShutterSound() { return _enableShutterSound(/*enabled*/false); } private native final boolean _enableShutterSound(boolean enabled); private static class IAppOpsCallbackWrapper extends IAppOpsCallback.Stub { private final WeakReference<Camera> mWeakCamera; IAppOpsCallbackWrapper(Camera camera) { mWeakCamera = new WeakReference<Camera>(camera); } @Override public void opChanged(int op, int uid, String packageName) { if (op == AppOpsManager.OP_PLAY_AUDIO) { final Camera camera = mWeakCamera.get(); if (camera != null) { camera.updateAppOpsPlayAudio(); } } } } private void updateAppOpsPlayAudio() { synchronized (mShutterSoundLock) { boolean oldHasAppOpsPlayAudio = mHasAppOpsPlayAudio; try { int mode = AppOpsManager.MODE_IGNORED; if (mAppOps != null) { mode = mAppOps.checkAudioOperation(AppOpsManager.OP_PLAY_AUDIO, AudioAttributes.USAGE_ASSISTANCE_SONIFICATION, Process.myUid(), ActivityThread.currentPackageName()); } mHasAppOpsPlayAudio = mode == AppOpsManager.MODE_ALLOWED; } catch (RemoteException e) { Log.e(TAG, "AppOpsService check audio operation failed"); mHasAppOpsPlayAudio = false; } if (oldHasAppOpsPlayAudio != mHasAppOpsPlayAudio) { if (!mHasAppOpsPlayAudio) { IBinder b = ServiceManager.getService(Context.AUDIO_SERVICE); IAudioService audioService = IAudioService.Stub.asInterface(b); try { if (audioService.isCameraSoundForced()) { return; } } catch (RemoteException e) { Log.e(TAG, "Audio service is unavailable for queries"); } _enableShutterSound(false); } else { enableShutterSound(mShutterSoundEnabledFromApp); } } } }
/** * Callback interface for zoom changes during a smooth zoom operation. * * @see #setZoomChangeListener(OnZoomChangeListener) * @see #startSmoothZoom(int) * * @deprecated We recommend using the new {@link android.hardware.camera2} API for new * applications. */
@Deprecated public interface OnZoomChangeListener {
/** * Called when the zoom value has changed during a smooth zoom. * * @param zoomValue the current zoom value. In smooth zoom mode, camera * calls this for every new zoom value. * @param stopped whether smooth zoom is stopped. If the value is true, * this is the last zoom update for the application. * @param camera the Camera service object */
void onZoomChange(int zoomValue, boolean stopped, Camera camera); };
/** * Registers a listener to be notified when the zoom value is updated by the * camera driver during smooth zoom. * * @param listener the listener to notify * @see #startSmoothZoom(int) */
public final void setZoomChangeListener(OnZoomChangeListener listener) { mZoomListener = listener; }
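Tying the listener to startSmoothZoom(int), a sketch that zooms halfway in only when smooth zoom is supported (the target value is arbitrary):

    // Sketch: smooth zoom with progress logging until the final callback.
    Camera.Parameters params = mCamera.getParameters();
    if (params.isSmoothZoomSupported()) {
        mCamera.setZoomChangeListener(new Camera.OnZoomChangeListener() {
            @Override
            public void onZoomChange(int zoomValue, boolean stopped, Camera camera) {
                Log.d(TAG, "zoom=" + zoomValue + (stopped ? " (done)" : ""));
            }
        });
        mCamera.startSmoothZoom(params.getMaxZoom() / 2);
    }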
/** * Callback interface for face detected in the preview frame. * * @deprecated We recommend using the new {@link android.hardware.camera2} API for new * applications. */
@Deprecated public interface FaceDetectionListener {
/** * Notify the listener of the detected faces in the preview frame. * * @param faces The detected faces in a list * @param camera The {@link Camera} service object */
void onFaceDetection(Face[] faces, Camera camera); }
/** * Registers a listener to be notified about the faces detected in the * preview frame. * * @param listener the listener to notify * @see #startFaceDetection() */
public final void setFaceDetectionListener(FaceDetectionListener listener) { mFaceListener = listener; }
/** * Starts the face detection. This should be called after preview is started. * The camera will notify {@link FaceDetectionListener} of the detected * faces in the preview frame. The detected faces may be the same as the * previous ones. Applications should call {@link #stopFaceDetection} to * stop the face detection. This method is supported if {@link * Parameters#getMaxNumDetectedFaces()} returns a number larger than 0. * If the face detection has started, apps should not call this again. * * <p>When the face detection is running, {@link Parameters#setWhiteBalance(String)}, * {@link Parameters#setFocusAreas(List)}, and {@link Parameters#setMeteringAreas(List)} * have no effect. The camera uses the detected faces to do auto-white balance, * auto exposure, and autofocus. * * <p>If the apps call {@link #autoFocus(AutoFocusCallback)}, the camera * will stop sending face callbacks. The last face callback indicates the * areas used to do autofocus. After focus completes, face detection will * resume sending face callbacks. If the apps call {@link * #cancelAutoFocus()}, the face callbacks will also resume.</p> * * <p>After calling {@link #takePicture(Camera.ShutterCallback, Camera.PictureCallback, * Camera.PictureCallback)} or {@link #stopPreview()}, and then resuming * preview with {@link #startPreview()}, the apps should call this method * again to resume face detection.</p> * * @throws IllegalArgumentException if the face detection is unsupported. * @throws RuntimeException if the method fails or the face detection is * already running. * @see FaceDetectionListener * @see #stopFaceDetection() * @see Parameters#getMaxNumDetectedFaces() */
public final void startFaceDetection() { if (mFaceDetectionRunning) { throw new RuntimeException("Face detection is already running"); } _startFaceDetection(CAMERA_FACE_DETECTION_HW); mFaceDetectionRunning = true; }
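Face detection is typically enabled right after preview starts, and only when the hardware reports support (a sketch; mCamera is illustrative):

    // Sketch: turn on face detection once preview is running.
    mCamera.startPreview();
    if (mCamera.getParameters().getMaxNumDetectedFaces() > 0) {
        mCamera.setFaceDetectionListener(new Camera.FaceDetectionListener() {
            @Override
            public void onFaceDetection(Camera.Face[] faces, Camera camera) {
                Log.d(TAG, "Detected " + faces.length + " face(s)");
            }
        });
        mCamera.startFaceDetection();
    }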
/** * Stops the face detection. * * @see #startFaceDetection() */
public final void stopFaceDetection() { _stopFaceDetection(); mFaceDetectionRunning = false; } private native final void _startFaceDetection(int type); private native final void _stopFaceDetection();
/** * Information about a face identified through camera face detection. * * <p>When face detection is used with a camera, the {@link FaceDetectionListener} returns a * list of face objects for use in focusing and metering.</p> * * @see FaceDetectionListener * @deprecated We recommend using the new {@link android.hardware.camera2} API for new * applications. */
@Deprecated public static class Face {
/** * Create an empty face. */
public Face() { }
/** * Bounds of the face. (-1000, -1000) represents the top-left of the * camera field of view, and (1000, 1000) represents the bottom-right of * the field of view. For example, suppose the size of the viewfinder UI * is 800x480. The rect passed from the driver is (-1000, -1000, 0, 0). * The corresponding viewfinder rect should be (0, 0, 400, 240). It is * guaranteed left < right and top < bottom. The coordinates can be * smaller than -1000 or bigger than 1000. But at least one vertex will * be within (-1000, -1000) and (1000, 1000). * * <p>The direction is relative to the sensor orientation, that is, what * the sensor sees. The direction is not affected by the rotation or * mirroring of {@link #setDisplayOrientation(int)}. The face bounding * rectangle does not provide any information about face orientation.</p> * * <p>Here is the matrix to convert driver coordinates to View coordinates * in pixels.</p> * <pre> * Matrix matrix = new Matrix(); * CameraInfo info = CameraHolder.instance().getCameraInfo()[cameraId]; * // Need mirror for front camera. * boolean mirror = (info.facing == CameraInfo.CAMERA_FACING_FRONT); * matrix.setScale(mirror ? -1 : 1, 1); * // This is the value for android.hardware.Camera.setDisplayOrientation. * matrix.postRotate(displayOrientation); * // Camera driver coordinates range from (-1000, -1000) to (1000, 1000). * // UI coordinates range from (0, 0) to (width, height). * matrix.postScale(view.getWidth() / 2000f, view.getHeight() / 2000f); * matrix.postTranslate(view.getWidth() / 2f, view.getHeight() / 2f); * </pre> * * @see #startFaceDetection() */
public Rect rect;

/** * <p>The confidence level for the detection of the face. The range is 1 to * 100. 100 is the highest confidence.</p> * * <p>Depending on the device, even very low-confidence faces may be * listed, so applications should filter out faces with low confidence, * depending on the use case. For a typical point-and-shoot camera * application that wishes to display rectangles around detected faces, * filtering out faces with confidence less than 50 is recommended.</p> * * @see #startFaceDetection() */
public int score;
/** * A unique id per face while the face is visible to the tracker. If * the face leaves the field-of-view and comes back, it will get a new * id. This is an optional field, may not be supported on all devices. * If not supported, id will always be set to -1. The optional fields * are supported as a set. Either they are all valid, or none of them * are. */
public int id = -1;
The coordinates of the center of the left eye. The coordinates are in the same space as the ones for rect. This is an optional field and may not be supported on all devices. If not supported, the value will always be set to null. The optional fields are supported as a set: either they are all valid, or none of them are.
/** * The coordinates of the center of the left eye. The coordinates are in * the same space as the ones for {@link #rect}. This is an optional * field, may not be supported on all devices. If not supported, the * value will always be set to null. The optional fields are supported * as a set. Either they are all valid, or none of them are. */
public Point leftEye = null;
The coordinates of the center of the right eye. The coordinates are in the same space as the ones for rect. This is an optional field and may not be supported on all devices. If not supported, the value will always be set to null. The optional fields are supported as a set: either they are all valid, or none of them are.
/** * The coordinates of the center of the right eye. The coordinates are * in the same space as the ones for {@link #rect}.This is an optional * field, may not be supported on all devices. If not supported, the * value will always be set to null. The optional fields are supported * as a set. Either they are all valid, or none of them are. */
public Point rightEye = null;
The coordinates of the center of the mouth. The coordinates are in the same space as the ones for rect. This is an optional field and may not be supported on all devices. If not supported, the value will always be set to null. The optional fields are supported as a set: either they are all valid, or none of them are.
/** * The coordinates of the center of the mouth. The coordinates are in * the same space as the ones for {@link #rect}. This is an optional * field, may not be supported on all devices. If not supported, the * value will always be set to null. The optional fields are supported * as a set. Either they are all valid, or none of them are. */
public Point mouth = null; }
Unspecified camera error.
See Also:
  • ErrorCallback
/** * Unspecified camera error. * @see Camera.ErrorCallback */
public static final int CAMERA_ERROR_UNKNOWN = 1;
Camera was disconnected due to use by higher priority user.
See Also:
  • ErrorCallback
/** * Camera was disconnected due to use by higher priority user. * @see Camera.ErrorCallback */
public static final int CAMERA_ERROR_EVICTED = 2;
Camera was disconnected due to device policy change or client application going to background.
See Also:
  • ErrorCallback
@hide
/** * Camera was disconnected due to device policy change or client * application going to background. * @see Camera.ErrorCallback * * @hide */
public static final int CAMERA_ERROR_DISABLED = 3;
Media server died. In this case, the application must release the Camera object and instantiate a new one.
See Also:
  • ErrorCallback
/** * Media server died. In this case, the application must release the * Camera object and instantiate a new one. * @see Camera.ErrorCallback */
public static final int CAMERA_ERROR_SERVER_DIED = 100;
Callback interface for camera error notification.
See Also:
  • setErrorCallback(ErrorCallback)
Deprecated:We recommend using the new camera2 API for new applications.
/** * Callback interface for camera error notification. * * @see #setErrorCallback(ErrorCallback) * * @deprecated We recommend using the new {@link android.hardware.camera2} API for new * applications. */
@Deprecated public interface ErrorCallback {
Callback for camera errors.
Params:
  • error – error code: CAMERA_ERROR_UNKNOWN or CAMERA_ERROR_SERVER_DIED
  • camera – the Camera service object
/** * Callback for camera errors. * @param error error code: * <ul> * <li>{@link #CAMERA_ERROR_UNKNOWN} * <li>{@link #CAMERA_ERROR_SERVER_DIED} * </ul> * @param camera the Camera service object */
void onError(int error, Camera camera); };
Registers a callback to be invoked when an error occurs.
Params:
  • cb – The callback to run
/** * Registers a callback to be invoked when an error occurs. * @param cb The callback to run */
public final void setErrorCallback(ErrorCallback cb) { mErrorCallback = cb; }
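A typical callback might look like the following sketch; the reopen hook is hypothetical application code that re-opens the camera after CAMERA_ERROR_SERVER_DIED, as the error documentation above requires.

import android.hardware.Camera;
import android.util.Log;

class ErrorHandling {
    private static final String TAG = "CameraErrors";

    static void installErrorCallback(Camera camera, final Runnable reopen) {
        camera.setErrorCallback(new Camera.ErrorCallback() {
            @Override
            public void onError(int error, Camera cam) {
                if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
                    cam.release();   // the documentation requires releasing and re-creating the Camera
                    reopen.run();    // hypothetical: application opens a new Camera and restarts preview
                } else {
                    Log.w(TAG, "Camera error: " + error);
                }
            }
        });
    }
}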
Registers a callback to be invoked when an error occurs. The detailed error callback may contain an error code that gives more detailed information about the error. When a detailed callback is set, the callback set via setErrorCallback(ErrorCallback) will stop receiving onError calls.
Params:
  • cb – The callback to run
@hide
/** * Registers a callback to be invoked when an error occurs. * The detailed error callback may contain error code that * gives more detailed information about the error. * * When a detailed callback is set, the callback set via * #setErrorCallback(ErrorCallback) will stop receiving * onError call. * * @param cb The callback to run * * @hide */
public final void setDetailedErrorCallback(ErrorCallback cb) { mDetailedErrorCallback = cb; } private native final void native_setParameters(String params); private native final String native_getParameters();
Changes the settings for this Camera service.
Params:
  • params – the Parameters to use for this Camera service
Throws:
  • RuntimeException – if any parameter is invalid or not supported.
See Also:
  • getParameters()
/** * Changes the settings for this Camera service. * * @param params the Parameters to use for this Camera service * @throws RuntimeException if any parameter is invalid or not supported. * @see #getParameters() */
public void setParameters(Parameters params) { // If using preview allocations, don't allow preview size changes if (mUsingPreviewAllocation) { Size newPreviewSize = params.getPreviewSize(); Size currentPreviewSize = getParameters().getPreviewSize(); if (newPreviewSize.width != currentPreviewSize.width || newPreviewSize.height != currentPreviewSize.height) { throw new IllegalStateException("Cannot change preview size" + " while a preview allocation is configured."); } } native_setParameters(params.flatten()); }
Returns the current settings for this Camera service. If modifications are made to the returned Parameters, they must be passed to setParameters(Parameters) to take effect.
Throws:
  • RuntimeException – if reading parameters fails; usually this would be because of a hardware or other low-level error, or because release() has been called on this Camera instance.
See Also:
  • setParameters(Parameters)
/** * Returns the current settings for this Camera service. * If modifications are made to the returned Parameters, they must be passed * to {@link #setParameters(Camera.Parameters)} to take effect. * * @throws RuntimeException if reading parameters fails; usually this would * be because of a hardware or other low-level error, or because * release() has been called on this Camera instance. * @see #setParameters(Camera.Parameters) */
public Parameters getParameters() { Parameters p = new Parameters(); String s = native_getParameters(); p.unflatten(s); return p; }
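A minimal sketch of the get-modify-set pattern described above, assuming the application only wants to enable a color effect when the device reports support for it:

import android.hardware.Camera;
import java.util.List;

class ParameterUpdate {
    static void enableSepiaIfSupported(Camera camera) {
        Camera.Parameters params = camera.getParameters();
        List<String> effects = params.getSupportedColorEffects();
        if (effects != null && effects.contains(Camera.Parameters.EFFECT_SEPIA)) {
            params.setColorEffect(Camera.Parameters.EFFECT_SEPIA);
            camera.setParameters(params);   // nothing takes effect until this call
        }
    }
}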
Returns an empty Parameters for testing purposes.
Returns:a Parameter object.
@hide
/** * Returns an empty {@link Parameters} for testing purpose. * * @return a Parameter object. * * @hide */
public static Parameters getEmptyParameters() { Camera camera = new Camera(); return camera.new Parameters(); }
Returns a copied Parameters; for shim use only.
Params:
  • parameters – a non-null parameters
Throws:
  • NullPointerException – if parameters was null
Returns:a Parameter object, with all the parameters copied from parameters.
@hide
/** * Returns a copied {@link Parameters}; for shim use only. * * @param parameters a non-{@code null} parameters * @return a Parameter object, with all the parameters copied from {@code parameters}. * * @throws NullPointerException if {@code parameters} was {@code null} * @hide */
public static Parameters getParametersCopy(Camera.Parameters parameters) { if (parameters == null) { throw new NullPointerException("parameters must not be null"); } Camera camera = parameters.getOuter(); Parameters p = camera.new Parameters(); p.copyFrom(parameters); return p; }
Image size (width and height dimensions).
Deprecated:We recommend using the new camera2 API for new applications.
/** * Image size (width and height dimensions). * @deprecated We recommend using the new {@link android.hardware.camera2} API for new * applications. */
@Deprecated public class Size {
Sets the dimensions for pictures.
Params:
  • w – the photo width (pixels)
  • h – the photo height (pixels)
/** * Sets the dimensions for pictures. * * @param w the photo width (pixels) * @param h the photo height (pixels) */
public Size(int w, int h) { width = w; height = h; }
Compares obj to this size.
Params:
  • obj – the object to compare this size with.
Returns:true if the width and height of obj is the same as those of this size. false otherwise.
/** * Compares {@code obj} to this size. * * @param obj the object to compare this size with. * @return {@code true} if the width and height of {@code obj} is the * same as those of this size. {@code false} otherwise. */
@Override public boolean equals(Object obj) { if (!(obj instanceof Size)) { return false; } Size s = (Size) obj; return width == s.width && height == s.height; } @Override public int hashCode() { return width * 32713 + height; }
width of the picture
/** width of the picture */
public int width;
height of the picture
/** height of the picture */
public int height; };

The Area class is used for choosing specific metering and focus areas for the camera to use when calculating auto-exposure, auto-white balance, and auto-focus.

To find out how many simultaneous areas a given camera supports, use Parameters.getMaxNumMeteringAreas() and Parameters.getMaxNumFocusAreas(). If metering or focusing area selection is unsupported, these methods will return 0.

Each Area consists of a rectangle specifying its bounds, and a weight that determines its importance. The bounds are relative to the camera's current field of view. The coordinates are mapped so that (-1000, -1000) is always the top-left corner of the current field of view, and (1000, 1000) is always the bottom-right corner of the current field of view. Setting Areas with bounds outside that range is not allowed. Areas with zero or negative width or height are not allowed.

The weight must range from 1 to 1000, and represents a weight for every pixel in the area. This means that a large metering area with the same weight as a smaller area will have more effect in the metering result. Metering areas can overlap and the driver will add the weights in the overlap region.

See Also:
  • Parameters.setFocusAreas(List)
  • Parameters.getFocusAreas()
  • Parameters.getMaxNumFocusAreas()
  • Parameters.setMeteringAreas(List)
  • Parameters.getMeteringAreas()
  • Parameters.getMaxNumMeteringAreas()
Deprecated:We recommend using the new camera2 API for new applications.
/** * <p>The Area class is used for choosing specific metering and focus areas for * the camera to use when calculating auto-exposure, auto-white balance, and * auto-focus.</p> * * <p>To find out how many simultaneous areas a given camera supports, use * {@link Parameters#getMaxNumMeteringAreas()} and * {@link Parameters#getMaxNumFocusAreas()}. If metering or focusing area * selection is unsupported, these methods will return 0.</p> * * <p>Each Area consists of a rectangle specifying its bounds, and a weight * that determines its importance. The bounds are relative to the camera's * current field of view. The coordinates are mapped so that (-1000, -1000) * is always the top-left corner of the current field of view, and (1000, * 1000) is always the bottom-right corner of the current field of * view. Setting Areas with bounds outside that range is not allowed. Areas * with zero or negative width or height are not allowed.</p> * * <p>The weight must range from 1 to 1000, and represents a weight for * every pixel in the area. This means that a large metering area with * the same weight as a smaller area will have more effect in the * metering result. Metering areas can overlap and the driver * will add the weights in the overlap region.</p> * * @see Parameters#setFocusAreas(List) * @see Parameters#getFocusAreas() * @see Parameters#getMaxNumFocusAreas() * @see Parameters#setMeteringAreas(List) * @see Parameters#getMeteringAreas() * @see Parameters#getMaxNumMeteringAreas() * * @deprecated We recommend using the new {@link android.hardware.camera2} API for new * applications. */
@Deprecated public static class Area {
Create an area with specified rectangle and weight.
Params:
  • rect – the bounds of the area.
  • weight – the weight of the area.
/** * Create an area with specified rectangle and weight. * * @param rect the bounds of the area. * @param weight the weight of the area. */
public Area(Rect rect, int weight) { this.rect = rect; this.weight = weight; }
Compares obj to this area.
Params:
  • obj – the object to compare this area with.
Returns:true if the rectangle and weight of obj is the same as those of this area. false otherwise.
/** * Compares {@code obj} to this area. * * @param obj the object to compare this area with. * @return {@code true} if the rectangle and weight of {@code obj} is * the same as those of this area. {@code false} otherwise. */
@Override public boolean equals(Object obj) { if (!(obj instanceof Area)) { return false; } Area a = (Area) obj; if (rect == null) { if (a.rect != null) return false; } else { if (!rect.equals(a.rect)) return false; } return weight == a.weight; }
Bounds of the area. (-1000, -1000) represents the top-left of the camera field of view, and (1000, 1000) represents the bottom-right of the field of view. Setting bounds outside that range is not allowed. Bounds with zero or negative width or height are not allowed.
See Also:
  • Parameters.getFocusAreas()
  • Parameters.getMeteringAreas()
/** * Bounds of the area. (-1000, -1000) represents the top-left of the * camera field of view, and (1000, 1000) represents the bottom-right of * the field of view. Setting bounds outside that range is not * allowed. Bounds with zero or negative width or height are not * allowed. * * @see Parameters#getFocusAreas() * @see Parameters#getMeteringAreas() */
public Rect rect;
Weight of the area. The weight must range from 1 to 1000, and represents a weight for every pixel in the area. This means that a large metering area with the same weight as a smaller area will have more effect in the metering result. Metering areas can overlap and the driver will add the weights in the overlap region.
See Also:
  • Parameters.getFocusAreas()
  • Parameters.getMeteringAreas()
/** * Weight of the area. The weight must range from 1 to 1000, and * represents a weight for every pixel in the area. This means that a * large metering area with the same weight as a smaller area will have * more effect in the metering result. Metering areas can overlap and * the driver will add the weights in the overlap region. * * @see Parameters#getFocusAreas() * @see Parameters#getMeteringAreas() */
public int weight; }
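A minimal sketch of using the Area class above with the Parameters focus-area APIs it references; the centered rectangle and weight are arbitrary illustrative values, not recommended settings.

import android.graphics.Rect;
import android.hardware.Camera;
import java.util.ArrayList;
import java.util.List;

class FocusAreaExample {
    static void focusOnCenter(Camera camera) {
        Camera.Parameters params = camera.getParameters();
        if (params.getMaxNumFocusAreas() > 0) {           // 0 means focus areas unsupported
            Rect center = new Rect(-100, -100, 100, 100); // small box around the field-of-view center
            List<Camera.Area> areas = new ArrayList<>();
            areas.add(new Camera.Area(center, 1000));     // weight must be in 1..1000
            params.setFocusAreas(areas);
            camera.setParameters(params);
        }
    }
}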
Camera service settings.

To make camera parameters take effect, applications have to call Camera.setParameters(Parameters). For example, after setWhiteBalance is called, white balance is not actually changed until Camera.setParameters(Parameters) is called with the changed parameters object.

Different devices may have different camera capabilities, such as picture size or flash modes. The application should query the camera capabilities before setting parameters. For example, the application should call getSupportedColorEffects() before calling setColorEffect(String). If the camera does not support color effects, getSupportedColorEffects() will return null.

Deprecated:We recommend using the new camera2 API for new applications.
/** * Camera service settings. * * <p>To make camera parameters take effect, applications have to call * {@link Camera#setParameters(Camera.Parameters)}. For example, after * {@link Camera.Parameters#setWhiteBalance} is called, white balance is not * actually changed until {@link Camera#setParameters(Camera.Parameters)} * is called with the changed parameters object. * * <p>Different devices may have different camera capabilities, such as * picture size or flash modes. The application should query the camera * capabilities before setting parameters. For example, the application * should call {@link Camera.Parameters#getSupportedColorEffects()} before * calling {@link Camera.Parameters#setColorEffect(String)}. If the * camera does not support color effects, * {@link Camera.Parameters#getSupportedColorEffects()} will return null. * * @deprecated We recommend using the new {@link android.hardware.camera2} API for new * applications. */
@Deprecated public class Parameters { // Parameter keys to communicate with the camera driver. private static final String KEY_PREVIEW_SIZE = "preview-size"; private static final String KEY_PREVIEW_FORMAT = "preview-format"; private static final String KEY_PREVIEW_FRAME_RATE = "preview-frame-rate"; private static final String KEY_PREVIEW_FPS_RANGE = "preview-fps-range"; private static final String KEY_PICTURE_SIZE = "picture-size"; private static final String KEY_PICTURE_FORMAT = "picture-format"; private static final String KEY_JPEG_THUMBNAIL_SIZE = "jpeg-thumbnail-size"; private static final String KEY_JPEG_THUMBNAIL_WIDTH = "jpeg-thumbnail-width"; private static final String KEY_JPEG_THUMBNAIL_HEIGHT = "jpeg-thumbnail-height"; private static final String KEY_JPEG_THUMBNAIL_QUALITY = "jpeg-thumbnail-quality"; private static final String KEY_JPEG_QUALITY = "jpeg-quality"; private static final String KEY_ROTATION = "rotation"; private static final String KEY_GPS_LATITUDE = "gps-latitude"; private static final String KEY_GPS_LONGITUDE = "gps-longitude"; private static final String KEY_GPS_ALTITUDE = "gps-altitude"; private static final String KEY_GPS_TIMESTAMP = "gps-timestamp"; private static final String KEY_GPS_PROCESSING_METHOD = "gps-processing-method"; private static final String KEY_WHITE_BALANCE = "whitebalance"; private static final String KEY_EFFECT = "effect"; private static final String KEY_ANTIBANDING = "antibanding"; private static final String KEY_SCENE_MODE = "scene-mode"; private static final String KEY_FLASH_MODE = "flash-mode"; private static final String KEY_FOCUS_MODE = "focus-mode"; private static final String KEY_FOCUS_AREAS = "focus-areas"; private static final String KEY_MAX_NUM_FOCUS_AREAS = "max-num-focus-areas"; private static final String KEY_FOCAL_LENGTH = "focal-length"; private static final String KEY_HORIZONTAL_VIEW_ANGLE = "horizontal-view-angle"; private static final String KEY_VERTICAL_VIEW_ANGLE = "vertical-view-angle"; private static final String KEY_EXPOSURE_COMPENSATION = "exposure-compensation"; private static final String KEY_MAX_EXPOSURE_COMPENSATION = "max-exposure-compensation"; private static final String KEY_MIN_EXPOSURE_COMPENSATION = "min-exposure-compensation"; private static final String KEY_EXPOSURE_COMPENSATION_STEP = "exposure-compensation-step"; private static final String KEY_AUTO_EXPOSURE_LOCK = "auto-exposure-lock"; private static final String KEY_AUTO_EXPOSURE_LOCK_SUPPORTED = "auto-exposure-lock-supported"; private static final String KEY_AUTO_WHITEBALANCE_LOCK = "auto-whitebalance-lock"; private static final String KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED = "auto-whitebalance-lock-supported"; private static final String KEY_METERING_AREAS = "metering-areas"; private static final String KEY_MAX_NUM_METERING_AREAS = "max-num-metering-areas"; private static final String KEY_ZOOM = "zoom"; private static final String KEY_MAX_ZOOM = "max-zoom"; private static final String KEY_ZOOM_RATIOS = "zoom-ratios"; private static final String KEY_ZOOM_SUPPORTED = "zoom-supported"; private static final String KEY_SMOOTH_ZOOM_SUPPORTED = "smooth-zoom-supported"; private static final String KEY_FOCUS_DISTANCES = "focus-distances"; private static final String KEY_VIDEO_SIZE = "video-size"; private static final String KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO = "preferred-preview-size-for-video"; private static final String KEY_MAX_NUM_DETECTED_FACES_HW = "max-num-detected-faces-hw"; private static final String KEY_MAX_NUM_DETECTED_FACES_SW = 
"max-num-detected-faces-sw"; private static final String KEY_RECORDING_HINT = "recording-hint"; private static final String KEY_VIDEO_SNAPSHOT_SUPPORTED = "video-snapshot-supported"; private static final String KEY_VIDEO_STABILIZATION = "video-stabilization"; private static final String KEY_VIDEO_STABILIZATION_SUPPORTED = "video-stabilization-supported"; // Parameter key suffix for supported values. private static final String SUPPORTED_VALUES_SUFFIX = "-values"; private static final String TRUE = "true"; private static final String FALSE = "false"; // Values for white balance settings. public static final String WHITE_BALANCE_AUTO = "auto"; public static final String WHITE_BALANCE_INCANDESCENT = "incandescent"; public static final String WHITE_BALANCE_FLUORESCENT = "fluorescent"; public static final String WHITE_BALANCE_WARM_FLUORESCENT = "warm-fluorescent"; public static final String WHITE_BALANCE_DAYLIGHT = "daylight"; public static final String WHITE_BALANCE_CLOUDY_DAYLIGHT = "cloudy-daylight"; public static final String WHITE_BALANCE_TWILIGHT = "twilight"; public static final String WHITE_BALANCE_SHADE = "shade"; // Values for color effect settings. public static final String EFFECT_NONE = "none"; public static final String EFFECT_MONO = "mono"; public static final String EFFECT_NEGATIVE = "negative"; public static final String EFFECT_SOLARIZE = "solarize"; public static final String EFFECT_SEPIA = "sepia"; public static final String EFFECT_POSTERIZE = "posterize"; public static final String EFFECT_WHITEBOARD = "whiteboard"; public static final String EFFECT_BLACKBOARD = "blackboard"; public static final String EFFECT_AQUA = "aqua"; // Values for antibanding settings. public static final String ANTIBANDING_AUTO = "auto"; public static final String ANTIBANDING_50HZ = "50hz"; public static final String ANTIBANDING_60HZ = "60hz"; public static final String ANTIBANDING_OFF = "off"; // Values for flash mode settings.
Flash will not be fired.
/** * Flash will not be fired. */
public static final String FLASH_MODE_OFF = "off";
Flash will be fired automatically when required. The flash may be fired during preview, auto-focus, or snapshot depending on the driver.
/** * Flash will be fired automatically when required. The flash may be fired * during preview, auto-focus, or snapshot depending on the driver. */
public static final String FLASH_MODE_AUTO = "auto";
Flash will always be fired during snapshot. The flash may also be fired during preview or auto-focus depending on the driver.
/** * Flash will always be fired during snapshot. The flash may also be * fired during preview or auto-focus depending on the driver. */
public static final String FLASH_MODE_ON = "on";
Flash will be fired in red-eye reduction mode.
/** * Flash will be fired in red-eye reduction mode. */
public static final String FLASH_MODE_RED_EYE = "red-eye";
Constant emission of light during preview, auto-focus and snapshot. This can also be used for video recording.
/** * Constant emission of light during preview, auto-focus and snapshot. * This can also be used for video recording. */
public static final String FLASH_MODE_TORCH = "torch";
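For illustration, a hedged sketch that enables torch mode only when the device advertises it (getSupportedFlashModes may return null on devices without a flash):

import android.hardware.Camera;
import java.util.List;

class FlashControl {
    static void enableTorchIfSupported(Camera camera) {
        Camera.Parameters params = camera.getParameters();
        List<String> modes = params.getSupportedFlashModes();
        if (modes != null && modes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
            params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
            camera.setParameters(params);
        }
    }
}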
Scene mode is off.
/** * Scene mode is off. */
public static final String SCENE_MODE_AUTO = "auto";
Take photos of fast moving objects. Same as SCENE_MODE_SPORTS.
/** * Take photos of fast moving objects. Same as {@link * #SCENE_MODE_SPORTS}. */
public static final String SCENE_MODE_ACTION = "action";
Take people pictures.
/** * Take people pictures. */
public static final String SCENE_MODE_PORTRAIT = "portrait";
Take pictures on distant objects.
/** * Take pictures on distant objects. */
public static final String SCENE_MODE_LANDSCAPE = "landscape";
Take photos at night.
/** * Take photos at night. */
public static final String SCENE_MODE_NIGHT = "night";
Take people pictures at night.
/** * Take people pictures at night. */
public static final String SCENE_MODE_NIGHT_PORTRAIT = "night-portrait";
Take photos in a theater. Flash light is off.
/** * Take photos in a theater. Flash light is off. */
public static final String SCENE_MODE_THEATRE = "theatre";
Take pictures on the beach.
/** * Take pictures on the beach. */
public static final String SCENE_MODE_BEACH = "beach";
Take pictures on the snow.
/** * Take pictures on the snow. */
public static final String SCENE_MODE_SNOW = "snow";
Take sunset photos.
/** * Take sunset photos. */
public static final String SCENE_MODE_SUNSET = "sunset";
Avoid blurry pictures (for example, due to hand shake).
/** * Avoid blurry pictures (for example, due to hand shake). */
public static final String SCENE_MODE_STEADYPHOTO = "steadyphoto";
For shooting firework displays.
/** * For shooting firework displays. */
public static final String SCENE_MODE_FIREWORKS = "fireworks";
Take photos of fast moving objects. Same as SCENE_MODE_ACTION.
/** * Take photos of fast moving objects. Same as {@link * #SCENE_MODE_ACTION}. */
public static final String SCENE_MODE_SPORTS = "sports";
Take indoor low-light shot.
/** * Take indoor low-light shot. */
public static final String SCENE_MODE_PARTY = "party";
Capture the naturally warm color of scenes lit by candles.
/** * Capture the naturally warm color of scenes lit by candles. */
public static final String SCENE_MODE_CANDLELIGHT = "candlelight";
Applications are looking for a barcode. Camera driver will be optimized for barcode reading.
/** * Applications are looking for a barcode. Camera driver will be * optimized for barcode reading. */
public static final String SCENE_MODE_BARCODE = "barcode";
Capture a scene using high dynamic range imaging techniques. The camera will return an image that has an extended dynamic range compared to a regular capture. Capturing such an image may take longer than a regular capture.
/** * Capture a scene using high dynamic range imaging techniques. The * camera will return an image that has an extended dynamic range * compared to a regular capture. Capturing such an image may take * longer than a regular capture. */
public static final String SCENE_MODE_HDR = "hdr";
Auto-focus mode. Applications should call Camera.autoFocus(AutoFocusCallback) to start the focus in this mode.
/** * Auto-focus mode. Applications should call {@link * #autoFocus(AutoFocusCallback)} to start the focus in this mode. */
public static final String FOCUS_MODE_AUTO = "auto";
Focus is set at infinity. Applications should not call Camera.autoFocus(AutoFocusCallback) in this mode.
/** * Focus is set at infinity. Applications should not call * {@link #autoFocus(AutoFocusCallback)} in this mode. */
public static final String FOCUS_MODE_INFINITY = "infinity";
Macro (close-up) focus mode. Applications should call Camera.autoFocus(AutoFocusCallback) to start the focus in this mode.
/** * Macro (close-up) focus mode. Applications should call * {@link #autoFocus(AutoFocusCallback)} to start the focus in this * mode. */
public static final String FOCUS_MODE_MACRO = "macro";
Focus is fixed. The camera is always in this mode if the focus is not adjustable. If the camera has auto-focus, this mode can fix the focus, which is usually at hyperfocal distance. Applications should not call Camera.autoFocus(AutoFocusCallback) in this mode.
/** * Focus is fixed. The camera is always in this mode if the focus is not * adjustable. If the camera has auto-focus, this mode can fix the * focus, which is usually at hyperfocal distance. Applications should * not call {@link #autoFocus(AutoFocusCallback)} in this mode. */
public static final String FOCUS_MODE_FIXED = "fixed";
Extended depth of field (EDOF). Focusing is done digitally and continuously. Applications should not call Camera.autoFocus(AutoFocusCallback) in this mode.
/** * Extended depth of field (EDOF). Focusing is done digitally and * continuously. Applications should not call {@link * #autoFocus(AutoFocusCallback)} in this mode. */
public static final String FOCUS_MODE_EDOF = "edof";
Continuous auto focus mode intended for video recording. The camera continuously tries to focus. This is the best choice for video recording because the focus changes smoothly. Applications can still call Camera.takePicture(ShutterCallback, PictureCallback, PictureCallback) in this mode, but the subject may not be in focus. Auto focus starts when the parameter is set.

Since API level 14, applications can call Camera.autoFocus(AutoFocusCallback) in this mode. The focus callback will immediately return with a boolean that indicates whether the focus is sharp or not. The focus position is locked after autoFocus call. If applications want to resume the continuous focus, cancelAutoFocus must be called. Restarting the preview will not resume the continuous autofocus. To stop continuous focus, applications should change the focus mode to other modes.

See Also:
  • FOCUS_MODE_CONTINUOUS_PICTURE
/** * Continuous auto focus mode intended for video recording. The camera * continuously tries to focus. This is the best choice for video * recording because the focus changes smoothly . Applications still can * call {@link #takePicture(Camera.ShutterCallback, * Camera.PictureCallback, Camera.PictureCallback)} in this mode but the * subject may not be in focus. Auto focus starts when the parameter is * set. * * <p>Since API level 14, applications can call {@link * #autoFocus(AutoFocusCallback)} in this mode. The focus callback will * immediately return with a boolean that indicates whether the focus is * sharp or not. The focus position is locked after autoFocus call. If * applications want to resume the continuous focus, cancelAutoFocus * must be called. Restarting the preview will not resume the continuous * autofocus. To stop continuous focus, applications should change the * focus mode to other modes. * * @see #FOCUS_MODE_CONTINUOUS_PICTURE */
public static final String FOCUS_MODE_CONTINUOUS_VIDEO = "continuous-video";
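A minimal sketch of enabling this mode, assuming the application checks the supported focus modes first; the same get-modify-set pattern applies to FOCUS_MODE_CONTINUOUS_PICTURE below.

import android.hardware.Camera;
import java.util.List;

class ContinuousFocus {
    static void enableContinuousVideoFocus(Camera camera) {
        Camera.Parameters params = camera.getParameters();
        List<String> modes = params.getSupportedFocusModes();
        if (modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
            camera.setParameters(params);   // continuous focus starts once the parameter is applied
        }
    }
}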
Continuous auto focus mode intended for taking pictures. The camera continuously tries to focus. The speed of focus change is more aggressive than FOCUS_MODE_CONTINUOUS_VIDEO. Auto focus starts when the parameter is set.

Applications can call Camera.autoFocus(AutoFocusCallback) in this mode. If the autofocus is in the middle of scanning, the focus callback will return when it completes. If the autofocus is not scanning, the focus callback will immediately return with a boolean that indicates whether the focus is sharp or not. The apps can then decide if they want to take a picture immediately or to change the focus mode to auto, and run a full autofocus cycle. The focus position is locked after autoFocus call. If applications want to resume the continuous focus, cancelAutoFocus must be called. Restarting the preview will not resume the continuous autofocus. To stop continuous focus, applications should change the focus mode to other modes.

See Also:
  • FOCUS_MODE_CONTINUOUS_VIDEO
/** * Continuous auto focus mode intended for taking pictures. The camera * continuously tries to focus. The speed of focus change is more * aggressive than {@link #FOCUS_MODE_CONTINUOUS_VIDEO}. Auto focus * starts when the parameter is set. * * <p>Applications can call {@link #autoFocus(AutoFocusCallback)} in * this mode. If the autofocus is in the middle of scanning, the focus * callback will return when it completes. If the autofocus is not * scanning, the focus callback will immediately return with a boolean * that indicates whether the focus is sharp or not. The apps can then * decide if they want to take a picture immediately or to change the * focus mode to auto, and run a full autofocus cycle. The focus * position is locked after autoFocus call. If applications want to * resume the continuous focus, cancelAutoFocus must be called. * Restarting the preview will not resume the continuous autofocus. To * stop continuous focus, applications should change the focus mode to * other modes. * * @see #FOCUS_MODE_CONTINUOUS_VIDEO */
public static final String FOCUS_MODE_CONTINUOUS_PICTURE = "continuous-picture"; // Indices for focus distance array.
The array index of near focus distance for use with getFocusDistances(float[]).
/** * The array index of near focus distance for use with * {@link #getFocusDistances(float[])}. */
public static final int FOCUS_DISTANCE_NEAR_INDEX = 0;
The array index of optimal focus distance for use with getFocusDistances(float[]).
/** * The array index of optimal focus distance for use with * {@link #getFocusDistances(float[])}. */
public static final int FOCUS_DISTANCE_OPTIMAL_INDEX = 1;
The array index of far focus distance for use with getFocusDistances(float[]).
/** * The array index of far focus distance for use with * {@link #getFocusDistances(float[])}. */
public static final int FOCUS_DISTANCE_FAR_INDEX = 2;
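For illustration, a small sketch reading the focus distance estimates (in meters) with these indices:

import android.hardware.Camera;

class FocusDistances {
    static float optimalFocusDistanceMeters(Camera.Parameters params) {
        float[] distances = new float[3];            // getFocusDistances expects a 3-element array
        params.getFocusDistances(distances);
        return distances[Camera.Parameters.FOCUS_DISTANCE_OPTIMAL_INDEX];
    }
}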
The array index of minimum preview fps for use with getPreviewFpsRange(int[]) or getSupportedPreviewFpsRange().
/** * The array index of minimum preview fps for use with {@link * #getPreviewFpsRange(int[])} or {@link * #getSupportedPreviewFpsRange()}. */
public static final int PREVIEW_FPS_MIN_INDEX = 0;
The array index of maximum preview fps for use with getPreviewFpsRange(int[]) or getSupportedPreviewFpsRange().
/** * The array index of maximum preview fps for use with {@link * #getPreviewFpsRange(int[])} or {@link * #getSupportedPreviewFpsRange()}. */
public static final int PREVIEW_FPS_MAX_INDEX = 1; // Formats for setPreviewFormat and setPictureFormat. private static final String PIXEL_FORMAT_YUV422SP = "yuv422sp"; private static final String PIXEL_FORMAT_YUV420SP = "yuv420sp"; private static final String PIXEL_FORMAT_YUV422I = "yuv422i-yuyv"; private static final String PIXEL_FORMAT_YUV420P = "yuv420p"; private static final String PIXEL_FORMAT_RGB565 = "rgb565"; private static final String PIXEL_FORMAT_JPEG = "jpeg"; private static final String PIXEL_FORMAT_BAYER_RGGB = "bayer-rggb";
Order matters: Keys that are set later will take precedence over keys that are set earlier (if the two keys conflict with each other).

One example is setPreviewFpsRange(int, int), since it conflicts with setPreviewFrameRate(int): whichever key is set later is the one that will take precedence.

/** * Order matters: Keys that are {@link #set(String, String) set} later * will take precedence over keys that are set earlier (if the two keys * conflict with each other). * * <p>One example is {@link #setPreviewFpsRange(int, int)} , since it * conflicts with {@link #setPreviewFrameRate(int)} whichever key is set later * is the one that will take precedence. * </p> */
private final LinkedHashMap<String, String> mMap; private Parameters() { mMap = new LinkedHashMap<String, String>(/*initialCapacity*/64); }
Overwrite existing parameters with a copy of the ones from other. For use by the legacy shim only.
@hide
/** * Overwrite existing parameters with a copy of the ones from {@code other}. * * <b>For use by the legacy shim only.</b> * * @hide */
public void copyFrom(Parameters other) { if (other == null) { throw new NullPointerException("other must not be null"); } mMap.putAll(other.mMap); } private Camera getOuter() { return Camera.this; }
Value equality check.
@hide
/** * Value equality check. * * @hide */
public boolean same(Parameters other) { if (this == other) { return true; } return other != null && Parameters.this.mMap.equals(other.mMap); }
Writes the current Parameters to the log.
@hide
Deprecated:
/** * Writes the current Parameters to the log. * @hide * @deprecated */
@Deprecated public void dump() { Log.e(TAG, "dump: size=" + mMap.size()); for (String k : mMap.keySet()) { Log.e(TAG, "dump: " + k + "=" + mMap.get(k)); } }
Creates a single string with all the parameters set in this Parameters object.

The unflatten(String) method does the reverse.

Returns:a String with all values from this Parameters object, in semi-colon delimited key-value pairs
/** * Creates a single string with all the parameters set in * this Parameters object. * <p>The {@link #unflatten(String)} method does the reverse.</p> * * @return a String with all values from this Parameters object, in * semi-colon delimited key-value pairs */
public String flatten() { StringBuilder flattened = new StringBuilder(128); for (String k : mMap.keySet()) { flattened.append(k); flattened.append("="); flattened.append(mMap.get(k)); flattened.append(";"); } // chop off the extra semicolon at the end flattened.deleteCharAt(flattened.length()-1); return flattened.toString(); }
Takes a flattened string of parameters and adds each one to this Parameters object.

The flatten() method does the reverse.

Params:
  • flattened – a String of parameters (key-value paired) that are semi-colon delimited
/** * Takes a flattened string of parameters and adds each one to * this Parameters object. * <p>The {@link #flatten()} method does the reverse.</p> * * @param flattened a String of parameters (key-value paired) that * are semi-colon delimited */
public void unflatten(String flattened) { mMap.clear(); TextUtils.StringSplitter splitter = new TextUtils.SimpleStringSplitter(';'); splitter.setString(flattened); for (String kv : splitter) { int pos = kv.indexOf('='); if (pos == -1) { continue; } String k = kv.substring(0, pos); String v = kv.substring(pos + 1); mMap.put(k, v); } } public void remove(String key) { mMap.remove(key); }
Sets a String parameter.
Params:
  • key – the key name for the parameter
  • value – the String value of the parameter
/** * Sets a String parameter. * * @param key the key name for the parameter * @param value the String value of the parameter */
public void set(String key, String value) { if (key.indexOf('=') != -1 || key.indexOf(';') != -1 || key.indexOf(0) != -1) { Log.e(TAG, "Key \"" + key + "\" contains invalid character (= or ; or \\0)"); return; } if (value.indexOf('=') != -1 || value.indexOf(';') != -1 || value.indexOf(0) != -1) { Log.e(TAG, "Value \"" + value + "\" contains invalid character (= or ; or \\0)"); return; } put(key, value); }
Sets an integer parameter.
Params:
  • key – the key name for the parameter
  • value – the int value of the parameter
/** * Sets an integer parameter. * * @param key the key name for the parameter * @param value the int value of the parameter */
public void set(String key, int value) { put(key, Integer.toString(value)); } private void put(String key, String value) { /* * Remove the key if it already exists. * * This way setting a new value for an already existing key will always move * that key to be ordered the latest in the map. */ mMap.remove(key); mMap.put(key, value); } private void set(String key, List<Area> areas) { if (areas == null) { set(key, "(0,0,0,0,0)"); } else { StringBuilder buffer = new StringBuilder(); for (int i = 0; i < areas.size(); i++) { Area area = areas.get(i); Rect rect = area.rect; buffer.append('('); buffer.append(rect.left); buffer.append(','); buffer.append(rect.top); buffer.append(','); buffer.append(rect.right); buffer.append(','); buffer.append(rect.bottom); buffer.append(','); buffer.append(area.weight); buffer.append(')'); if (i != areas.size() - 1) buffer.append(','); } set(key, buffer.toString()); } }
Returns the value of a String parameter.
Params:
  • key – the key name for the parameter
Returns:the String value of the parameter
/** * Returns the value of a String parameter. * * @param key the key name for the parameter * @return the String value of the parameter */
public String get(String key) { return mMap.get(key); }
Returns the value of an integer parameter.
Params:
  • key – the key name for the parameter
Returns:the int value of the parameter
/** * Returns the value of an integer parameter. * * @param key the key name for the parameter * @return the int value of the parameter */
public int getInt(String key) { return Integer.parseInt(mMap.get(key)); }
Sets the dimensions for preview pictures. If the preview has already started, applications should stop the preview first before changing preview size. The sides of width and height are based on camera orientation. That is, the preview size is the size before it is rotated by display orientation. So applications need to consider the display orientation while setting preview size. For example, suppose the camera supports both 480x320 and 320x480 preview sizes. The application wants a 3:2 preview ratio. If the display orientation is set to 0 or 180, preview size should be set to 480x320. If the display orientation is set to 90 or 270, preview size should be set to 320x480. The display orientation should also be considered while setting picture size and thumbnail size.
Params:
  • width – the width of the pictures, in pixels
  • height – the height of the pictures, in pixels
See Also:
  • setDisplayOrientation(int)
  • getCameraInfo(int, CameraInfo)
  • setPictureSize(int, int)
  • setJpegThumbnailSize(int, int)
/** * Sets the dimensions for preview pictures. If the preview has already * started, applications should stop the preview first before changing * preview size. * * The sides of width and height are based on camera orientation. That * is, the preview size is the size before it is rotated by display * orientation. So applications need to consider the display orientation * while setting preview size. For example, suppose the camera supports * both 480x320 and 320x480 preview sizes. The application wants a 3:2 * preview ratio. If the display orientation is set to 0 or 180, preview * size should be set to 480x320. If the display orientation is set to * 90 or 270, preview size should be set to 320x480. The display * orientation should also be considered while setting picture size and * thumbnail size. * * @param width the width of the pictures, in pixels * @param height the height of the pictures, in pixels * @see #setDisplayOrientation(int) * @see #getCameraInfo(int, CameraInfo) * @see #setPictureSize(int, int) * @see #setJpegThumbnailSize(int, int) */
public void setPreviewSize(int width, int height) { String v = Integer.toString(width) + "x" + Integer.toString(height); set(KEY_PREVIEW_SIZE, v); }
Returns the dimensions setting for preview pictures.
Returns:a Size object with the width and height setting for the preview picture
/** * Returns the dimensions setting for preview pictures. * * @return a Size object with the width and height setting * for the preview picture */
public Size getPreviewSize() { String pair = get(KEY_PREVIEW_SIZE); return strToSize(pair); }
Gets the supported preview sizes.
Returns:a list of Size object. This method will always return a list with at least one element.
/** * Gets the supported preview sizes. * * @return a list of Size object. This method will always return a list * with at least one element. */
public List<Size> getSupportedPreviewSizes() { String str = get(KEY_PREVIEW_SIZE + SUPPORTED_VALUES_SUFFIX); return splitSize(str); }
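As an illustrative sketch (not a prescribed algorithm), an application might pick the supported preview size closest to its desired sensor-oriented dimensions, bearing in mind the display-orientation note above; for a portrait UI the caller would pass swapped width and height.

import android.hardware.Camera;
import java.util.List;

class PreviewSizeChooser {
    static Camera.Size chooseClosest(Camera.Parameters params, int targetWidth, int targetHeight) {
        List<Camera.Size> sizes = params.getSupportedPreviewSizes();  // always has at least one element
        Camera.Size best = sizes.get(0);
        int bestDiff = Integer.MAX_VALUE;
        for (Camera.Size size : sizes) {
            int diff = Math.abs(size.width - targetWidth) + Math.abs(size.height - targetHeight);
            if (diff < bestDiff) {
                bestDiff = diff;
                best = size;
            }
        }
        return best;
    }
}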

Gets the supported video frame sizes that can be used by MediaRecorder.

If the returned list is not null, it will contain at least one Size, and one of the sizes in the list must be passed to MediaRecorder.setVideoSize() for a camcorder application if the camera is used as the video source. In this case, the size of the preview can be different from the resolution of the recorded video during video recording.

See Also:
  • getPreferredPreviewSizeForVideo()
Returns:a list of Size object if camera has separate preview and video output; otherwise, null is returned.
/** * <p>Gets the supported video frame sizes that can be used by * MediaRecorder.</p> * * <p>If the returned list is not null, the returned list will contain at * least one Size and one of the sizes in the returned list must be * passed to MediaRecorder.setVideoSize() for camcorder application if * camera is used as the video source. In this case, the size of the * preview can be different from the resolution of the recorded video * during video recording.</p> * * @return a list of Size object if camera has separate preview and * video output; otherwise, null is returned. * @see #getPreferredPreviewSizeForVideo() */
public List<Size> getSupportedVideoSizes() { String str = get(KEY_VIDEO_SIZE + SUPPORTED_VALUES_SUFFIX); return splitSize(str); }
Returns the preferred or recommended preview size (width and height) in pixels for video recording. Camcorder applications should set the preview size to a value that is not larger than the preferred preview size. In other words, the product of the width and height of the preview size should not be larger than that of the preferred preview size. In addition, we recommend choosing a preview size that has the same aspect ratio as the resolution of the video to be recorded.
See Also:
  • getSupportedVideoSizes()
Returns:the preferred preview size (width and height) in pixels for video recording if getSupportedVideoSizes() does not return null; otherwise, null is returned.
/** * Returns the preferred or recommended preview size (width and height) * in pixels for video recording. Camcorder applications should * set the preview size to a value that is not larger than the * preferred preview size. In other words, the product of the width * and height of the preview size should not be larger than that of * the preferred preview size. In addition, we recommend to choose a * preview size that has the same aspect ratio as the resolution of * video to be recorded. * * @return the preferred preview size (width and height) in pixels for * video recording if getSupportedVideoSizes() does not return * null; otherwise, null is returned. * @see #getSupportedVideoSizes() */
public Size getPreferredPreviewSizeForVideo() { String pair = get(KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO); return strToSize(pair); }

Sets the dimensions for EXIF thumbnail in Jpeg picture. If applications set both width and height to 0, EXIF will not contain thumbnail.

Applications need to consider the display orientation. See setPreviewSize(int, int) for reference.

Params:
  • width – the width of the thumbnail, in pixels
  • height – the height of the thumbnail, in pixels
See Also:
  • setPreviewSize(int, int)
/** * <p>Sets the dimensions for EXIF thumbnail in Jpeg picture. If * applications set both width and height to 0, EXIF will not contain * thumbnail.</p> * * <p>Applications need to consider the display orientation. See {@link * #setPreviewSize(int,int)} for reference.</p> * * @param width the width of the thumbnail, in pixels * @param height the height of the thumbnail, in pixels * @see #setPreviewSize(int,int) */
public void setJpegThumbnailSize(int width, int height) { set(KEY_JPEG_THUMBNAIL_WIDTH, width); set(KEY_JPEG_THUMBNAIL_HEIGHT, height); }
Returns the dimensions for EXIF thumbnail in Jpeg picture.
Returns:a Size object with the height and width setting for the EXIF thumbnails
/** * Returns the dimensions for EXIF thumbnail in Jpeg picture. * * @return a Size object with the height and width setting for the EXIF * thumbnails */
public Size getJpegThumbnailSize() { return new Size(getInt(KEY_JPEG_THUMBNAIL_WIDTH), getInt(KEY_JPEG_THUMBNAIL_HEIGHT)); }
Gets the supported jpeg thumbnail sizes.
Returns:a list of Size object. This method will always return a list with at least two elements. Size 0,0 (no thumbnail) is always supported.
/** * Gets the supported jpeg thumbnail sizes. * * @return a list of Size object. This method will always return a list * with at least two elements. Size 0,0 (no thumbnail) is always * supported. */
public List<Size> getSupportedJpegThumbnailSizes() { String str = get(KEY_JPEG_THUMBNAIL_SIZE + SUPPORTED_VALUES_SUFFIX); return splitSize(str); }
Sets the quality of the EXIF thumbnail in Jpeg picture.
Params:
  • quality – the JPEG quality of the EXIF thumbnail. The range is 1 to 100, with 100 being the best.
/** * Sets the quality of the EXIF thumbnail in Jpeg picture. * * @param quality the JPEG quality of the EXIF thumbnail. The range is 1 * to 100, with 100 being the best. */
public void setJpegThumbnailQuality(int quality) { set(KEY_JPEG_THUMBNAIL_QUALITY, quality); }
Returns the quality setting for the EXIF thumbnail in Jpeg picture.
Returns:the JPEG quality setting of the EXIF thumbnail.
/** * Returns the quality setting for the EXIF thumbnail in Jpeg picture. * * @return the JPEG quality setting of the EXIF thumbnail. */
public int getJpegThumbnailQuality() { return getInt(KEY_JPEG_THUMBNAIL_QUALITY); }
Sets Jpeg quality of captured picture.
Params:
  • quality – the JPEG quality of captured picture. The range is 1 to 100, with 100 being the best.
/** * Sets Jpeg quality of captured picture. * * @param quality the JPEG quality of captured picture. The range is 1 * to 100, with 100 being the best. */
public void setJpegQuality(int quality) { set(KEY_JPEG_QUALITY, quality); }
Returns the quality setting for the JPEG picture.
Returns:the JPEG picture quality setting.
/** * Returns the quality setting for the JPEG picture. * * @return the JPEG picture quality setting. */
public int getJpegQuality() { return getInt(KEY_JPEG_QUALITY); }
Sets the rate at which preview frames are received. This is the target frame rate. The actual frame rate depends on the driver.
Params:
  • fps – the frame rate (frames per second)
Deprecated:replaced by setPreviewFpsRange(int, int)
/** * Sets the rate at which preview frames are received. This is the * target frame rate. The actual frame rate depends on the driver. * * @param fps the frame rate (frames per second) * @deprecated replaced by {@link #setPreviewFpsRange(int,int)} */
@Deprecated public void setPreviewFrameRate(int fps) { set(KEY_PREVIEW_FRAME_RATE, fps); }
Returns the setting for the rate at which preview frames are received. This is the target frame rate. The actual frame rate depends on the driver.
Returns:the frame rate setting (frames per second)
Deprecated:replaced by getPreviewFpsRange(int[])
/** * Returns the setting for the rate at which preview frames are * received. This is the target frame rate. The actual frame rate * depends on the driver. * * @return the frame rate setting (frames per second) * @deprecated replaced by {@link #getPreviewFpsRange(int[])} */
@Deprecated public int getPreviewFrameRate() { return getInt(KEY_PREVIEW_FRAME_RATE); }
Gets the supported preview frame rates.
Returns:a list of supported preview frame rates. null if preview frame rate setting is not supported.
Deprecated:replaced by getSupportedPreviewFpsRange()
/** * Gets the supported preview frame rates. * * @return a list of supported preview frame rates. null if preview * frame rate setting is not supported. * @deprecated replaced by {@link #getSupportedPreviewFpsRange()} */
@Deprecated public List<Integer> getSupportedPreviewFrameRates() { String str = get(KEY_PREVIEW_FRAME_RATE + SUPPORTED_VALUES_SUFFIX); return splitInt(str); }
Sets the minimum and maximum preview fps. This controls the rate of preview frames received in PreviewCallback. The minimum and maximum preview fps must be one of the elements from getSupportedPreviewFpsRange.
Params:
  • min – the minimum preview fps (scaled by 1000).
  • max – the maximum preview fps (scaled by 1000).
Throws:
  • RuntimeException – if fps range is invalid.
See Also:
  • setPreviewCallbackWithBuffer(PreviewCallback)
  • getSupportedPreviewFpsRange()
/** * Sets the minimum and maximum preview fps. This controls the rate of * preview frames received in {@link PreviewCallback}. The minimum and * maximum preview fps must be one of the elements from {@link * #getSupportedPreviewFpsRange}. * * @param min the minimum preview fps (scaled by 1000). * @param max the maximum preview fps (scaled by 1000). * @throws RuntimeException if fps range is invalid. * @see #setPreviewCallbackWithBuffer(Camera.PreviewCallback) * @see #getSupportedPreviewFpsRange() */
public void setPreviewFpsRange(int min, int max) { set(KEY_PREVIEW_FPS_RANGE, "" + min + "," + max); }
Returns the current minimum and maximum preview fps. The values are one of the elements returned by getSupportedPreviewFpsRange.
See Also:
  • PREVIEW_FPS_MIN_INDEX
  • PREVIEW_FPS_MAX_INDEX
  • getSupportedPreviewFpsRange()
Returns:range the minimum and maximum preview fps (scaled by 1000).
/** * Returns the current minimum and maximum preview fps. The values are * one of the elements returned by {@link #getSupportedPreviewFpsRange}. * * @return range the minimum and maximum preview fps (scaled by 1000). * @see #PREVIEW_FPS_MIN_INDEX * @see #PREVIEW_FPS_MAX_INDEX * @see #getSupportedPreviewFpsRange() */
public void getPreviewFpsRange(int[] range) { if (range == null || range.length != 2) { throw new IllegalArgumentException( "range must be an array with two elements."); } splitInt(get(KEY_PREVIEW_FPS_RANGE), range); }
Gets the supported preview fps (frame-per-second) ranges. Each range contains a minimum fps and maximum fps. If the minimum fps equals the maximum fps, the camera outputs frames at a fixed frame rate. If not, the camera outputs frames at an auto frame rate. The actual frame rate fluctuates between the minimum and the maximum. The values are multiplied by 1000 and represented in integers. For example, if the frame rate is 26.623 frames per second, the value is 26623.
See Also:
  • PREVIEW_FPS_MIN_INDEX
  • PREVIEW_FPS_MAX_INDEX
Returns:a list of supported preview fps ranges. This method returns a list with at least one element. Every element is an int array of two values - minimum fps and maximum fps. The list is sorted from small to large (first by maximum fps and then minimum fps).
/** * Gets the supported preview fps (frame-per-second) ranges. Each range * contains a minimum fps and maximum fps. If minimum fps equals to * maximum fps, the camera outputs frames in fixed frame rate. If not, * the camera outputs frames in auto frame rate. The actual frame rate * fluctuates between the minimum and the maximum. The values are * multiplied by 1000 and represented in integers. For example, if frame * rate is 26.623 frames per second, the value is 26623. * * @return a list of supported preview fps ranges. This method returns a * list with at least one element. Every element is an int array * of two values - minimum fps and maximum fps. The list is * sorted from small to large (first by maximum fps and then * minimum fps). * @see #PREVIEW_FPS_MIN_INDEX * @see #PREVIEW_FPS_MAX_INDEX */
public List<int[]> getSupportedPreviewFpsRange() { String str = get(KEY_PREVIEW_FPS_RANGE + SUPPORTED_VALUES_SUFFIX); return splitRange(str); }
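A minimal sketch that picks the range with the highest maximum fps, relying on the sort order documented above (values are fps multiplied by 1000):

import android.hardware.Camera;
import java.util.List;

class FpsRangeChooser {
    static void useFastestRange(Camera camera) {
        Camera.Parameters params = camera.getParameters();
        List<int[]> ranges = params.getSupportedPreviewFpsRange();
        int[] best = ranges.get(ranges.size() - 1);   // list is sorted, largest maximum fps last
        params.setPreviewFpsRange(best[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                                  best[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
        camera.setParameters(params);
    }
}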
Sets the image format for preview pictures.

If this is never called, the default format will be ImageFormat.NV21, which uses the NV21 encoding format.

Use getSupportedPreviewFormats to get a list of the available preview formats.

It is strongly recommended that either ImageFormat.NV21 or ImageFormat.YV12 is used, since they are supported by all camera devices.

For YV12, the image buffer that is received is not necessarily tightly packed, as there may be padding at the end of each row of pixel data, as described in ImageFormat.YV12. For camera callback data, it can be assumed that the stride of the Y and UV data is the smallest possible that meets the alignment requirements. That is, if the preview size is width x height, then the following equations describe the buffer index for the beginning of row y for the Y plane and row c for the U and V planes:


yStride   = (int) ceil(width / 16.0) * 16;
uvStride  = (int) ceil( (yStride / 2) / 16.0) * 16;
ySize     = yStride * height;
uvSize    = uvStride * height / 2;
yRowIndex = yStride * y;
uRowIndex = ySize + uvSize + uvStride * c;
vRowIndex = ySize + uvStride * c;
size      = ySize + uvSize * 2;
 
Params:
  • pixel_format – the desired preview picture format, defined by one of the ImageFormat constants. (E.g., ImageFormat.NV21 (default), or ImageFormat.YV12)
See Also:
  • ImageFormat
  • getSupportedPreviewFormats()
/** * Sets the image format for preview pictures. * <p>If this is never called, the default format will be * {@link android.graphics.ImageFormat#NV21}, which * uses the NV21 encoding format.</p> * * <p>Use {@link Parameters#getSupportedPreviewFormats} to get a list of * the available preview formats. * * <p>It is strongly recommended that either * {@link android.graphics.ImageFormat#NV21} or * {@link android.graphics.ImageFormat#YV12} is used, since * they are supported by all camera devices.</p> * * <p>For YV12, the image buffer that is received is not necessarily * tightly packed, as there may be padding at the end of each row of * pixel data, as described in * {@link android.graphics.ImageFormat#YV12}. For camera callback data, * it can be assumed that the stride of the Y and UV data is the * smallest possible that meets the alignment requirements. That is, if * the preview size is <var>width x height</var>, then the following * equations describe the buffer index for the beginning of row * <var>y</var> for the Y plane and row <var>c</var> for the U and V * planes: * * <pre>{@code * yStride = (int) ceil(width / 16.0) * 16; * uvStride = (int) ceil( (yStride / 2) / 16.0) * 16; * ySize = yStride * height; * uvSize = uvStride * height / 2; * yRowIndex = yStride * y; * uRowIndex = ySize + uvSize + uvStride * c; * vRowIndex = ySize + uvStride * c; * size = ySize + uvSize * 2; * } *</pre> * * @param pixel_format the desired preview picture format, defined by * one of the {@link android.graphics.ImageFormat} constants. (E.g., * <var>ImageFormat.NV21</var> (default), or * <var>ImageFormat.YV12</var>) * * @see android.graphics.ImageFormat * @see android.hardware.Camera.Parameters#getSupportedPreviewFormats */
public void setPreviewFormat(int pixel_format) { String s = cameraFormatForPixelFormat(pixel_format); if (s == null) { throw new IllegalArgumentException( "Invalid pixel_format=" + pixel_format); } set(KEY_PREVIEW_FORMAT, s); }
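For illustration, the stride equations above translate directly into a helper that computes the YV12 callback-buffer size; sizeForPreview is a hypothetical helper name, and the result could be used when allocating buffers for addCallbackBuffer().

class Yv12Buffer {
    static int sizeForPreview(int width, int height) {
        int yStride  = (int) Math.ceil(width / 16.0) * 16;
        int uvStride = (int) Math.ceil((yStride / 2) / 16.0) * 16;
        int ySize    = yStride * height;
        int uvSize   = uvStride * height / 2;
        return ySize + uvSize * 2;   // total bytes per YV12 preview frame
    }
}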
Returns the image format for preview frames received from PreviewCallback.
See Also:
Returns:the preview format.
/** * Returns the image format for preview frames got from * {@link PreviewCallback}. * * @return the preview format. * @see android.graphics.ImageFormat * @see #setPreviewFormat */
public int getPreviewFormat() { return pixelFormatForCameraFormat(get(KEY_PREVIEW_FORMAT)); }
/** * Gets the supported preview formats. {@link android.graphics.ImageFormat#NV21} * is always supported. {@link android.graphics.ImageFormat#YV12} * is always supported since API level 12. * * @return a list of supported preview formats. This method will always * return a list with at least one element. * @see android.graphics.ImageFormat * @see #setPreviewFormat */
public List<Integer> getSupportedPreviewFormats() { String str = get(KEY_PREVIEW_FORMAT + SUPPORTED_VALUES_SUFFIX); ArrayList<Integer> formats = new ArrayList<Integer>(); for (String s : split(str)) { int f = pixelFormatForCameraFormat(s); if (f == ImageFormat.UNKNOWN) continue; formats.add(f); } return formats; }

/** * <p>Sets the dimensions for pictures.</p> * * <p>Applications need to consider the display orientation. See {@link * #setPreviewSize(int,int)} for reference.</p> * * @param width the width for pictures, in pixels * @param height the height for pictures, in pixels * @see #setPreviewSize(int,int) * */
public void setPictureSize(int width, int height) { String v = Integer.toString(width) + "x" + Integer.toString(height); set(KEY_PICTURE_SIZE, v); }
/** * Returns the dimension setting for pictures. * * @return a Size object with the height and width setting * for pictures */
public Size getPictureSize() { String pair = get(KEY_PICTURE_SIZE); return strToSize(pair); }
/** * Gets the supported picture sizes. * * @return a list of supported picture sizes. This method will always * return a list with at least one element. */
public List<Size> getSupportedPictureSizes() { String str = get(KEY_PICTURE_SIZE + SUPPORTED_VALUES_SUFFIX); return splitSize(str); }
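A common pattern, shown here only as a sketch (the helper name is illustrative), is to pick the largest supported picture size by pixel count and apply it:

// Sketch: apply the largest supported picture size. The list is guaranteed
// to contain at least one element, so 'largest' is never null.
private static void applyLargestPictureSize(Camera camera) {
    Camera.Parameters params = camera.getParameters();
    Camera.Size largest = null;
    for (Camera.Size size : params.getSupportedPictureSizes()) {
        if (largest == null
                || (long) size.width * size.height > (long) largest.width * largest.height) {
            largest = size;
        }
    }
    params.setPictureSize(largest.width, largest.height);
    camera.setParameters(params);
}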
/** * Sets the image format for pictures. * * @param pixel_format the desired picture format * (<var>ImageFormat.NV21</var>, * <var>ImageFormat.RGB_565</var>, or * <var>ImageFormat.JPEG</var>) * @see android.graphics.ImageFormat */
public void setPictureFormat(int pixel_format) { String s = cameraFormatForPixelFormat(pixel_format); if (s == null) { throw new IllegalArgumentException( "Invalid pixel_format=" + pixel_format); } set(KEY_PICTURE_FORMAT, s); }
/** * Returns the image format for pictures. * * @return the picture format * @see android.graphics.ImageFormat */
public int getPictureFormat() { return pixelFormatForCameraFormat(get(KEY_PICTURE_FORMAT)); }
/** * Gets the supported picture formats. * * @return supported picture formats. This method will always return a * list with at least one element. * @see android.graphics.ImageFormat */
public List<Integer> getSupportedPictureFormats() {
    String str = get(KEY_PICTURE_FORMAT + SUPPORTED_VALUES_SUFFIX);
    ArrayList<Integer> formats = new ArrayList<Integer>();
    for (String s : split(str)) {
        int f = pixelFormatForCameraFormat(s);
        if (f == ImageFormat.UNKNOWN) continue;
        formats.add(f);
    }
    return formats;
}

private String cameraFormatForPixelFormat(int pixel_format) {
    switch(pixel_format) {
    case ImageFormat.NV16:    return PIXEL_FORMAT_YUV422SP;
    case ImageFormat.NV21:    return PIXEL_FORMAT_YUV420SP;
    case ImageFormat.YUY2:    return PIXEL_FORMAT_YUV422I;
    case ImageFormat.YV12:    return PIXEL_FORMAT_YUV420P;
    case ImageFormat.RGB_565: return PIXEL_FORMAT_RGB565;
    case ImageFormat.JPEG:    return PIXEL_FORMAT_JPEG;
    default:                  return null;
    }
}

private int pixelFormatForCameraFormat(String format) {
    if (format == null) return ImageFormat.UNKNOWN;
    if (format.equals(PIXEL_FORMAT_YUV422SP)) return ImageFormat.NV16;
    if (format.equals(PIXEL_FORMAT_YUV420SP)) return ImageFormat.NV21;
    if (format.equals(PIXEL_FORMAT_YUV422I))  return ImageFormat.YUY2;
    if (format.equals(PIXEL_FORMAT_YUV420P))  return ImageFormat.YV12;
    if (format.equals(PIXEL_FORMAT_RGB565))   return ImageFormat.RGB_565;
    if (format.equals(PIXEL_FORMAT_JPEG))     return ImageFormat.JPEG;
    return ImageFormat.UNKNOWN;
}
/** * Sets the clockwise rotation angle in degrees relative to the * orientation of the camera. This affects the pictures returned from * JPEG {@link PictureCallback}. The camera driver may set orientation * in the EXIF header without rotating the picture. Or the driver may * rotate the picture and the EXIF thumbnail. If the Jpeg picture is * rotated, the orientation in the EXIF header will be missing or 1 (row * #0 is top and column #0 is left side). * * <p> * If applications want to rotate the picture to match the orientation * of what users see, apps should use * {@link android.view.OrientationEventListener} and * {@link android.hardware.Camera.CameraInfo}. The value from * OrientationEventListener is relative to the natural orientation of * the device. CameraInfo.orientation is the angle between camera * orientation and natural device orientation. The sum of the two is the * rotation angle for back-facing camera. The difference of the two is * the rotation angle for front-facing camera. Note that the JPEG * pictures of front-facing cameras are not mirrored as in preview * display. * * <p> * For example, suppose the natural orientation of the device is * portrait. The device is rotated 270 degrees clockwise, so the device * orientation is 270. Suppose a back-facing camera sensor is mounted in * landscape and the top side of the camera sensor is aligned with the * right edge of the display in natural orientation. So the camera * orientation is 90. The rotation should be set to 0 (270 + 90). * * <p>The reference code is as follows. * * <pre> * public void onOrientationChanged(int orientation) { * if (orientation == ORIENTATION_UNKNOWN) return; * android.hardware.Camera.CameraInfo info = * new android.hardware.Camera.CameraInfo(); * android.hardware.Camera.getCameraInfo(cameraId, info); * orientation = (orientation + 45) / 90 * 90; * int rotation = 0; * if (info.facing == CameraInfo.CAMERA_FACING_FRONT) { * rotation = (info.orientation - orientation + 360) % 360; * } else { // back-facing camera * rotation = (info.orientation + orientation) % 360; * } * mParameters.setRotation(rotation); * } * </pre> * * @param rotation The rotation angle in degrees relative to the * orientation of the camera. Rotation can only be 0, * 90, 180 or 270. * @throws IllegalArgumentException if rotation value is invalid. * @see android.view.OrientationEventListener * @see #getCameraInfo(int, CameraInfo) */
public void setRotation(int rotation) { if (rotation == 0 || rotation == 90 || rotation == 180 || rotation == 270) { set(KEY_ROTATION, Integer.toString(rotation)); } else { throw new IllegalArgumentException( "Invalid rotation=" + rotation); } }
/** * Sets GPS latitude coordinate. This will be stored in JPEG EXIF * header. * * @param latitude GPS latitude coordinate. */
public void setGpsLatitude(double latitude) { set(KEY_GPS_LATITUDE, Double.toString(latitude)); }
/** * Sets GPS longitude coordinate. This will be stored in JPEG EXIF * header. * * @param longitude GPS longitude coordinate. */
public void setGpsLongitude(double longitude) { set(KEY_GPS_LONGITUDE, Double.toString(longitude)); }
/** * Sets GPS altitude. This will be stored in JPEG EXIF header. * * @param altitude GPS altitude in meters. */
public void setGpsAltitude(double altitude) { set(KEY_GPS_ALTITUDE, Double.toString(altitude)); }
/** * Sets GPS timestamp. This will be stored in JPEG EXIF header. * * @param timestamp GPS timestamp (UTC in seconds since January 1, * 1970). */
public void setGpsTimestamp(long timestamp) { set(KEY_GPS_TIMESTAMP, Long.toString(timestamp)); }
/** * Sets GPS processing method. The method will be stored in a UTF-8 string up to 31 bytes * long, in the JPEG EXIF header. * * @param processing_method The processing method to get this location. */
public void setGpsProcessingMethod(String processing_method) { set(KEY_GPS_PROCESSING_METHOD, processing_method); }
/** * Removes GPS latitude, longitude, altitude, and timestamp from the * parameters. */
public void removeGpsData() { remove(KEY_GPS_LATITUDE); remove(KEY_GPS_LONGITUDE); remove(KEY_GPS_ALTITUDE); remove(KEY_GPS_TIMESTAMP); remove(KEY_GPS_PROCESSING_METHOD); }
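As a sketch of how these setters fit together, an application holding an android.location.Location fix might copy it into the EXIF fields like this. The helper name is illustrative, using the provider name as the processing method is one reasonable choice rather than a requirement, and note the unit difference: Location.getTime() is in milliseconds while setGpsTimestamp expects seconds.

// Sketch: copy a location fix into the JPEG EXIF parameters.
private static void applyGpsToExif(Camera.Parameters params, android.location.Location fix) {
    params.setGpsLatitude(fix.getLatitude());
    params.setGpsLongitude(fix.getLongitude());
    if (fix.hasAltitude()) {
        params.setGpsAltitude(fix.getAltitude());
    }
    params.setGpsTimestamp(fix.getTime() / 1000); // milliseconds -> seconds
    if (fix.getProvider() != null) {
        params.setGpsProcessingMethod(fix.getProvider()); // e.g. "gps" or "network"
    }
}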
/** * Gets the current white balance setting. * * @return current white balance. null if white balance setting is not * supported. * @see #WHITE_BALANCE_AUTO * @see #WHITE_BALANCE_INCANDESCENT * @see #WHITE_BALANCE_FLUORESCENT * @see #WHITE_BALANCE_WARM_FLUORESCENT * @see #WHITE_BALANCE_DAYLIGHT * @see #WHITE_BALANCE_CLOUDY_DAYLIGHT * @see #WHITE_BALANCE_TWILIGHT * @see #WHITE_BALANCE_SHADE * */
public String getWhiteBalance() { return get(KEY_WHITE_BALANCE); }
/** * Sets the white balance. Changing the setting will release the * auto-white balance lock. It is recommended not to change white * balance and AWB lock at the same time. * * @param value new white balance. * @see #getWhiteBalance() * @see #setAutoWhiteBalanceLock(boolean) */
public void setWhiteBalance(String value) { String oldValue = get(KEY_WHITE_BALANCE); if (same(value, oldValue)) return; set(KEY_WHITE_BALANCE, value); set(KEY_AUTO_WHITEBALANCE_LOCK, FALSE); }
/** * Gets the supported white balance. * * @return a list of supported white balance. null if white balance * setting is not supported. * @see #getWhiteBalance() */
public List<String> getSupportedWhiteBalance() { String str = get(KEY_WHITE_BALANCE + SUPPORTED_VALUES_SUFFIX); return split(str); }
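Since the supported list is null on devices without white balance control, a guarded setter such as the following sketch (the helper name is illustrative) avoids writing unsupported values:

// Sketch: apply a white balance value only if the device reports support.
private static boolean trySetWhiteBalance(Camera.Parameters params, String value) {
    List<String> supported = params.getSupportedWhiteBalance();
    if (supported != null && supported.contains(value)) {
        params.setWhiteBalance(value);
        return true;
    }
    return false;
}

// Usage: trySetWhiteBalance(params, Camera.Parameters.WHITE_BALANCE_DAYLIGHT);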
/** * Gets the current color effect setting. * * @return current color effect. null if color effect * setting is not supported. * @see #EFFECT_NONE * @see #EFFECT_MONO * @see #EFFECT_NEGATIVE * @see #EFFECT_SOLARIZE * @see #EFFECT_SEPIA * @see #EFFECT_POSTERIZE * @see #EFFECT_WHITEBOARD * @see #EFFECT_BLACKBOARD * @see #EFFECT_AQUA */
public String getColorEffect() { return get(KEY_EFFECT); }
/** * Sets the current color effect setting. * * @param value new color effect. * @see #getColorEffect() */
public void setColorEffect(String value) { set(KEY_EFFECT, value); }
/** * Gets the supported color effects. * * @return a list of supported color effects. null if color effect * setting is not supported. * @see #getColorEffect() */
public List<String> getSupportedColorEffects() { String str = get(KEY_EFFECT + SUPPORTED_VALUES_SUFFIX); return split(str); }
/** * Gets the current antibanding setting. * * @return current antibanding. null if antibanding setting is not * supported. * @see #ANTIBANDING_AUTO * @see #ANTIBANDING_50HZ * @see #ANTIBANDING_60HZ * @see #ANTIBANDING_OFF */
public String getAntibanding() { return get(KEY_ANTIBANDING); }
/** * Sets the antibanding. * * @param antibanding new antibanding value. * @see #getAntibanding() */
public void setAntibanding(String antibanding) { set(KEY_ANTIBANDING, antibanding); }
/** * Gets the supported antibanding values. * * @return a list of supported antibanding values. null if antibanding * setting is not supported. * @see #getAntibanding() */
public List<String> getSupportedAntibanding() { String str = get(KEY_ANTIBANDING + SUPPORTED_VALUES_SUFFIX); return split(str); }
/** * Gets the current scene mode setting. * * @return one of SCENE_MODE_XXX string constant. null if scene mode * setting is not supported. * @see #SCENE_MODE_AUTO * @see #SCENE_MODE_ACTION * @see #SCENE_MODE_PORTRAIT * @see #SCENE_MODE_LANDSCAPE * @see #SCENE_MODE_NIGHT * @see #SCENE_MODE_NIGHT_PORTRAIT * @see #SCENE_MODE_THEATRE * @see #SCENE_MODE_BEACH * @see #SCENE_MODE_SNOW * @see #SCENE_MODE_SUNSET * @see #SCENE_MODE_STEADYPHOTO * @see #SCENE_MODE_FIREWORKS * @see #SCENE_MODE_SPORTS * @see #SCENE_MODE_PARTY * @see #SCENE_MODE_CANDLELIGHT * @see #SCENE_MODE_BARCODE */
public String getSceneMode() { return get(KEY_SCENE_MODE); }
/** * Sets the scene mode. Changing scene mode may override other * parameters (such as flash mode, focus mode, white balance). For * example, suppose originally flash mode is on and supported flash * modes are on/off. In night scene mode, both flash mode and supported * flash mode may be changed to off. After setting scene mode, * applications should call getParameters to know if some parameters are * changed. * * @param value scene mode. * @see #getSceneMode() */
public void setSceneMode(String value) { set(KEY_SCENE_MODE, value); }
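Because a scene mode may silently override flash, focus, and white balance, the sketch below (the helper name and the choice of night mode are illustrative) re-reads the parameters after applying the mode, as the documentation above recommends:

// Sketch: enable night scene mode when supported, then refresh the
// parameters to observe any settings the driver changed as a side effect.
private static Camera.Parameters enableNightSceneMode(Camera camera) {
    Camera.Parameters params = camera.getParameters();
    List<String> modes = params.getSupportedSceneModes();
    if (modes != null && modes.contains(Camera.Parameters.SCENE_MODE_NIGHT)) {
        params.setSceneMode(Camera.Parameters.SCENE_MODE_NIGHT);
        camera.setParameters(params);
        params = camera.getParameters(); // flash/focus/WB may now differ
    }
    return params;
}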
/** * Gets the supported scene modes. * * @return a list of supported scene modes. null if scene mode setting * is not supported. * @see #getSceneMode() */
public List<String> getSupportedSceneModes() { String str = get(KEY_SCENE_MODE + SUPPORTED_VALUES_SUFFIX); return split(str); }
/** * Gets the current flash mode setting. * * @return current flash mode. null if flash mode setting is not * supported. * @see #FLASH_MODE_OFF * @see #FLASH_MODE_AUTO * @see #FLASH_MODE_ON * @see #FLASH_MODE_RED_EYE * @see #FLASH_MODE_TORCH */
public String getFlashMode() { return get(KEY_FLASH_MODE); }
/** * Sets the flash mode. * * @param value flash mode. * @see #getFlashMode() */
public void setFlashMode(String value) { set(KEY_FLASH_MODE, value); }
/** * Gets the supported flash modes. * * @return a list of supported flash modes. null if flash mode setting * is not supported. * @see #getFlashMode() */
public List<String> getSupportedFlashModes() { String str = get(KEY_FLASH_MODE + SUPPORTED_VALUES_SUFFIX); return split(str); }
/** * Gets the current focus mode setting. * * @return current focus mode. This method will always return a non-null * value. Applications should call {@link * #autoFocus(AutoFocusCallback)} to start the focus if focus * mode is FOCUS_MODE_AUTO or FOCUS_MODE_MACRO. * @see #FOCUS_MODE_AUTO * @see #FOCUS_MODE_INFINITY * @see #FOCUS_MODE_MACRO * @see #FOCUS_MODE_FIXED * @see #FOCUS_MODE_EDOF * @see #FOCUS_MODE_CONTINUOUS_VIDEO */
public String getFocusMode() { return get(KEY_FOCUS_MODE); }
/** * Sets the focus mode. * * @param value focus mode. * @see #getFocusMode() */
public void setFocusMode(String value) { set(KEY_FOCUS_MODE, value); }
/** * Gets the supported focus modes. * * @return a list of supported focus modes. This method will always * return a list with at least one element. * @see #getFocusMode() */
public List<String> getSupportedFocusModes() { String str = get(KEY_FOCUS_MODE + SUPPORTED_VALUES_SUFFIX); return split(str); }
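A typical selection policy, sketched below with an illustrative helper name, prefers continuous focus for still shots and falls back to plain auto focus (which then requires an explicit autoFocus() call):

// Sketch: choose a focus mode from the supported list.
private static void applyPreferredFocusMode(Camera.Parameters params) {
    List<String> modes = params.getSupportedFocusModes();
    if (modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
        params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
    } else if (modes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
        params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
    }
}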
/** * Gets the focal length (in millimeter) of the camera. * * @return the focal length. Returns -1.0 when the device * doesn't report focal length information. */
public float getFocalLength() { return Float.parseFloat(get(KEY_FOCAL_LENGTH)); }
/** * Gets the horizontal angle of view in degrees. * * @return horizontal angle of view. Returns -1.0 when the device * doesn't report view angle information. */
public float getHorizontalViewAngle() { return Float.parseFloat(get(KEY_HORIZONTAL_VIEW_ANGLE)); }
/** * Gets the vertical angle of view in degrees. * * @return vertical angle of view. Returns -1.0 when the device * doesn't report view angle information. */
public float getVerticalViewAngle() { return Float.parseFloat(get(KEY_VERTICAL_VIEW_ANGLE)); }
/** * Gets the current exposure compensation index. * * @return current exposure compensation index. The range is {@link * #getMinExposureCompensation} to {@link * #getMaxExposureCompensation}. 0 means exposure is not * adjusted. */
public int getExposureCompensation() { return getInt(KEY_EXPOSURE_COMPENSATION, 0); }
/** * Sets the exposure compensation index. * * @param value exposure compensation index. The valid value range is * from {@link #getMinExposureCompensation} (inclusive) to {@link * #getMaxExposureCompensation} (inclusive). 0 means exposure is * not adjusted. Application should call * getMinExposureCompensation and getMaxExposureCompensation to * know if exposure compensation is supported. */
public void setExposureCompensation(int value) { set(KEY_EXPOSURE_COMPENSATION, value); }
/** * Gets the maximum exposure compensation index. * * @return maximum exposure compensation index (>=0). If both this * method and {@link #getMinExposureCompensation} return 0, * exposure compensation is not supported. */
public int getMaxExposureCompensation() { return getInt(KEY_MAX_EXPOSURE_COMPENSATION, 0); }
/** * Gets the minimum exposure compensation index. * * @return minimum exposure compensation index (<=0). If both this * method and {@link #getMaxExposureCompensation} return 0, * exposure compensation is not supported. */
public int getMinExposureCompensation() { return getInt(KEY_MIN_EXPOSURE_COMPENSATION, 0); }
/** * Gets the exposure compensation step. * * @return exposure compensation step. Applications can get EV by * multiplying the exposure compensation index and step. Ex: if * exposure compensation index is -6 and step is 0.333333333, EV * is -2. */
public float getExposureCompensationStep() { return getFloat(KEY_EXPOSURE_COMPENSATION_STEP, 0); }
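Putting the index, range, and step together: to request an exposure adjustment expressed in EV, divide by the step and clamp to the supported range. The sketch below uses an illustrative helper name and treats a step of 0 as "compensation not supported".

// Sketch: convert a desired EV adjustment into a compensation index.
private static int evToCompensationIndex(Camera.Parameters params, float ev) {
    float step = params.getExposureCompensationStep();
    if (step == 0) return 0; // exposure compensation not supported
    int index = Math.round(ev / step);
    index = Math.max(params.getMinExposureCompensation(), index);
    index = Math.min(params.getMaxExposureCompensation(), index);
    return index;
}

// Usage: params.setExposureCompensation(evToCompensationIndex(params, -1.0f));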

/** * <p>Sets the auto-exposure lock state. Applications should check * {@link #isAutoExposureLockSupported} before using this method.</p> * * <p>If set to true, the camera auto-exposure routine will immediately * pause until the lock is set to false. Exposure compensation settings * changes will still take effect while auto-exposure is locked.</p> * * <p>If auto-exposure is already locked, setting this to true again has * no effect (the driver will not recalculate exposure values).</p> * * <p>Stopping preview with {@link #stopPreview()}, or triggering still * image capture with {@link #takePicture(Camera.ShutterCallback, * Camera.PictureCallback, Camera.PictureCallback)}, will not change the * lock.</p> * * <p>Exposure compensation, auto-exposure lock, and auto-white balance * lock can be used to capture an exposure-bracketed burst of images, * for example.</p> * * <p>Auto-exposure state, including the lock state, will not be * maintained after camera {@link #release()} is called. Locking * auto-exposure after {@link #open()} but before the first call to * {@link #startPreview()} will not allow the auto-exposure routine to * run at all, and may result in severely over- or under-exposed * images.</p> * * @param toggle new state of the auto-exposure lock. True means that * auto-exposure is locked, false means that the auto-exposure * routine is free to run normally. * * @see #getAutoExposureLock() */
public void setAutoExposureLock(boolean toggle) { set(KEY_AUTO_EXPOSURE_LOCK, toggle ? TRUE : FALSE); }
/** * Gets the state of the auto-exposure lock. Applications should check * {@link #isAutoExposureLockSupported} before using this method. See * {@link #setAutoExposureLock} for details about the lock. * * @return State of the auto-exposure lock. Returns true if * auto-exposure is currently locked, and false otherwise. * * @see #setAutoExposureLock(boolean) * */
public boolean getAutoExposureLock() { String str = get(KEY_AUTO_EXPOSURE_LOCK); return TRUE.equals(str); }
/** * Returns true if auto-exposure locking is supported. Applications * should call this before trying to lock auto-exposure. See * {@link #setAutoExposureLock} for details about the lock. * * @return true if auto-exposure lock is supported. * @see #setAutoExposureLock(boolean) * */
public boolean isAutoExposureLockSupported() { String str = get(KEY_AUTO_EXPOSURE_LOCK_SUPPORTED); return TRUE.equals(str); }
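The bracketing use case mentioned above might start with a setup step like the following sketch: lock auto-exposure and auto-white balance where supported so that only the compensation index changes between shots. The helper name is illustrative, and the takePicture() burst itself is left to the application.

// Sketch: lock AE (and AWB, if available) before an exposure-bracketed burst.
private static void lockForExposureBracket(Camera camera) {
    Camera.Parameters params = camera.getParameters();
    if (params.isAutoExposureLockSupported()) {
        params.setAutoExposureLock(true);
    }
    if (params.isAutoWhiteBalanceLockSupported()) {
        params.setAutoWhiteBalanceLock(true);
    }
    camera.setParameters(params);
}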

/** * <p>Sets the auto-white balance lock state. Applications should check * {@link #isAutoWhiteBalanceLockSupported} before using this * method.</p> * * <p>If set to true, the camera auto-white balance routine will * immediately pause until the lock is set to false.</p> * * <p>If auto-white balance is already locked, setting this to true * again has no effect (the driver will not recalculate white balance * values).</p> * * <p>Stopping preview with {@link #stopPreview()}, or triggering still * image capture with {@link #takePicture(Camera.ShutterCallback, * Camera.PictureCallback, Camera.PictureCallback)}, will not change * the lock.</p> * * <p>Changing the white balance mode with {@link #setWhiteBalance} * will release the auto-white balance lock if it is set.</p> * * <p>Exposure compensation, AE lock, and AWB lock can be used to * capture an exposure-bracketed burst of images, for example. * Auto-white balance state, including the lock state, will not be * maintained after camera {@link #release()} is called. Locking * auto-white balance after {@link #open()} but before the first call to * {@link #startPreview()} will not allow the auto-white balance routine * to run at all, and may result in severely incorrect color in captured * images.</p> * * @param toggle new state of the auto-white balance lock. True means * that auto-white balance is locked, false means that the * auto-white balance routine is free to run normally. * * @see #getAutoWhiteBalanceLock() * @see #setWhiteBalance(String) */
public void setAutoWhiteBalanceLock(boolean toggle) { set(KEY_AUTO_WHITEBALANCE_LOCK, toggle ? TRUE : FALSE); }
/** * Gets the state of the auto-white balance lock. Applications should * check {@link #isAutoWhiteBalanceLockSupported} before using this * method. See {@link #setAutoWhiteBalanceLock} for details about the * lock. * * @return State of the auto-white balance lock. Returns true if * auto-white balance is currently locked, and false * otherwise. * * @see #setAutoWhiteBalanceLock(boolean) * */
public boolean getAutoWhiteBalanceLock() { String str = get(KEY_AUTO_WHITEBALANCE_LOCK); return TRUE.equals(str); }
/** * Returns true if auto-white balance locking is supported. Applications * should call this before trying to lock auto-white balance. See * {@link #setAutoWhiteBalanceLock} for details about the lock. * * @return true if auto-white balance lock is supported. * @see #setAutoWhiteBalanceLock(boolean) * */
public boolean isAutoWhiteBalanceLockSupported() { String str = get(KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED); return TRUE.equals(str); }
/** * Gets current zoom value. This also works when smooth zoom is in * progress. Applications should check {@link #isZoomSupported} before * using this method. * * @return the current zoom value. The range is 0 to {@link * #getMaxZoom}. 0 means the camera is not zoomed. */
public int getZoom() { return getInt(KEY_ZOOM, 0); }
/** * Sets current zoom value. If the camera is zoomed (value > 0), the * actual picture size may be smaller than picture size setting. * Applications can check the actual picture size after picture is * returned from {@link PictureCallback}. The preview size remains the * same in zoom. Applications should check {@link #isZoomSupported} * before using this method. * * @param value zoom value. The valid range is 0 to {@link #getMaxZoom}. */
public void setZoom(int value) { set(KEY_ZOOM, value); }
/** * Returns true if zoom is supported. Applications should call this * before using other zoom methods. * * @return true if zoom is supported. */
public boolean isZoomSupported() { String str = get(KEY_ZOOM_SUPPORTED); return TRUE.equals(str); }
/** * Gets the maximum zoom value allowed for snapshot. This is the maximum * value that applications can set to {@link #setZoom(int)}. * Applications should call {@link #isZoomSupported} before using this * method. This value may change in different preview size. Applications * should call this again after setting preview size. * * @return the maximum zoom value supported by the camera. */
public int getMaxZoom() { return getInt(KEY_MAX_ZOOM, 0); }
/** * Gets the zoom ratios of all zoom values. Applications should check * {@link #isZoomSupported} before using this method. * * @return the zoom ratios in 1/100 increments. Ex: a zoom of 3.2x is * returned as 320. The number of elements is {@link * #getMaxZoom} + 1. The list is sorted from small to large. The * first element is always 100. The last element is the zoom * ratio of the maximum zoom value. */
public List<Integer> getZoomRatios() { return splitInt(get(KEY_ZOOM_RATIOS)); }
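To translate a desired magnification (for example 2.0x, i.e. 200 in these units) into a value for setZoom(int), an application can scan this list for the closest ratio. The helper name and target value below are illustrative.

// Sketch: find the zoom index whose ratio is closest to a target ratio
// expressed in 1/100 increments.
private static int zoomIndexForRatio(Camera.Parameters params, int targetRatioX100) {
    if (!params.isZoomSupported()) return 0;
    List<Integer> ratios = params.getZoomRatios();
    int bestIndex = 0;
    for (int i = 1; i < ratios.size(); i++) {
        if (Math.abs(ratios.get(i) - targetRatioX100)
                < Math.abs(ratios.get(bestIndex) - targetRatioX100)) {
            bestIndex = i;
        }
    }
    return bestIndex;
}

// Usage: params.setZoom(zoomIndexForRatio(params, 200));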
/** * Returns true if smooth zoom is supported. Applications should call * this before using other smooth zoom methods. * * @return true if smooth zoom is supported. */
public boolean isSmoothZoomSupported() { String str = get(KEY_SMOOTH_ZOOM_SUPPORTED); return TRUE.equals(str); }

/** * <p>Gets the distances from the camera to where an object appears to be * in focus. The object is sharpest at the optimal focus distance. The * depth of field is the far focus distance minus near focus distance.</p> * * <p>Focus distances may change after calling {@link * #autoFocus(AutoFocusCallback)}, {@link #cancelAutoFocus}, or {@link * #startPreview()}. Applications can call {@link #getParameters()} * and this method anytime to get the latest focus distances. If the * focus mode is FOCUS_MODE_CONTINUOUS_VIDEO, focus distances may change * from time to time.</p> * * <p>This method is intended to estimate the distance between the camera * and the subject. After autofocus, the subject distance may be within * near and far focus distance. However, the precision depends on the * camera hardware, autofocus algorithm, the focus area, and the scene. * The error can be large and it should be only used as a reference.</p> * * <p>Far focus distance >= optimal focus distance >= near focus distance. * If the focus distance is infinity, the value will be * {@code Float.POSITIVE_INFINITY}.</p> * * @param output focus distances in meters. output must be a float * array with three elements. Near focus distance, optimal focus * distance, and far focus distance will be filled in the array. * @see #FOCUS_DISTANCE_NEAR_INDEX * @see #FOCUS_DISTANCE_OPTIMAL_INDEX * @see #FOCUS_DISTANCE_FAR_INDEX */
public void getFocusDistances(float[] output) { if (output == null || output.length != 3) { throw new IllegalArgumentException( "output must be a float array with three elements."); } splitFloat(get(KEY_FOCUS_DISTANCES), output); }
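Read after autofocus completes, the optimal distance gives a rough subject-distance estimate, as in this sketch (the helper name is illustrative; Float.POSITIVE_INFINITY means the lens is focused at infinity):

// Sketch: return the optimal focus distance in meters as a coarse estimate
// of the subject distance.
private static float estimateSubjectDistanceMeters(Camera.Parameters params) {
    float[] distances = new float[3];
    params.getFocusDistances(distances);
    return distances[Camera.Parameters.FOCUS_DISTANCE_OPTIMAL_INDEX];
}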
/** * Gets the maximum number of focus areas supported. This is the maximum * length of the list in {@link #setFocusAreas(List)} and * {@link #getFocusAreas()}. * * @return the maximum number of focus areas supported by the camera. * @see #getFocusAreas() */
public int getMaxNumFocusAreas() { return getInt(KEY_MAX_NUM_FOCUS_AREAS, 0); }

/** * <p>Gets the current focus areas. Camera driver uses the areas to decide * focus.</p> * * <p>Before using this API or {@link #setFocusAreas(List)}, apps should * call {@link #getMaxNumFocusAreas()} to know the maximum number of * focus areas first. If the value is 0, focus area is not supported.</p> * * <p>Each focus area is a rectangle with specified weight. The direction * is relative to the sensor orientation, that is, what the sensor sees. * The direction is not affected by the rotation or mirroring of * {@link #setDisplayOrientation(int)}. Coordinates of the rectangle * range from -1000 to 1000. (-1000, -1000) is the upper left point. * (1000, 1000) is the lower right point. The width and height of focus * areas cannot be 0 or negative.</p> * * <p>The weight must range from 1 to 1000. The weight should be * interpreted as a per-pixel weight - all pixels in the area have the * specified weight. This means a small area with the same weight as a * larger area will have less influence on the focusing than the larger * area. Focus areas can partially overlap and the driver will add the * weights in the overlap region.</p> * * <p>A special case of a {@code null} focus area list means the driver is * free to select focus targets as it wants. For example, the driver may * use more signals to select focus areas and change them * dynamically. Apps can set the focus area list to {@code null} if they * want the driver to completely control focusing.</p> * * <p>Focus areas are relative to the current field of view * ({@link #getZoom()}). No matter what the zoom level is, (-1000,-1000) * represents the top of the currently visible camera frame. The focus * area cannot be set to be outside the current field of view, even * when using zoom.</p> * * <p>Focus area only has effect if the current focus mode is * {@link #FOCUS_MODE_AUTO}, {@link #FOCUS_MODE_MACRO}, * {@link #FOCUS_MODE_CONTINUOUS_VIDEO}, or * {@link #FOCUS_MODE_CONTINUOUS_PICTURE}.</p> * * @return a list of current focus areas */
public List<Area> getFocusAreas() { return splitArea(get(KEY_FOCUS_AREAS)); }
/** * Sets focus areas. See {@link #getFocusAreas()} for documentation. * * @param focusAreas the focus areas * @see #getFocusAreas() */
public void setFocusAreas(List<Area> focusAreas) { set(KEY_FOCUS_AREAS, focusAreas); }
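A minimal sketch, assuming the common case of a single area: when focus areas are supported, hand the driver one centered rectangle in the fixed -1000..1000 coordinate system with full weight. The helper name is illustrative; metering areas can be set with the same kind of list.

// Sketch: focus on a small rectangle at the center of the field of view.
private static void focusOnCenter(Camera camera) {
    Camera.Parameters params = camera.getParameters();
    if (params.getMaxNumFocusAreas() > 0) {
        Rect center = new Rect(-100, -100, 100, 100); // sensor-oriented coordinates
        List<Camera.Area> areas = new ArrayList<Camera.Area>();
        areas.add(new Camera.Area(center, 1000));     // weight in range 1 to 1000
        params.setFocusAreas(areas);
        camera.setParameters(params);
    }
}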
/** * Gets the maximum number of metering areas supported. This is the * maximum length of the list in {@link #setMeteringAreas(List)} and * {@link #getMeteringAreas()}. * * @return the maximum number of metering areas supported by the camera. * @see #getMeteringAreas() */
public int getMaxNumMeteringAreas() { return getInt(KEY_MAX_NUM_METERING_AREAS, 0); }

/** * <p>Gets the current metering areas. Camera driver uses these areas to * decide exposure.</p> * * <p>Before using this API or {@link #setMeteringAreas(List)}, apps should * call {@link #getMaxNumMeteringAreas()} to know the maximum number of * metering areas first. If the value is 0, metering area is not * supported.</p> * * <p>Each metering area is a rectangle with specified weight. The * direction is relative to the sensor orientation, that is, what the * sensor sees. The direction is not affected by the rotation or * mirroring of {@link #setDisplayOrientation(int)}. Coordinates of the * rectangle range from -1000 to 1000. (-1000, -1000) is the upper left * point. (1000, 1000) is the lower right point. The width and height of * metering areas cannot be 0 or negative.</p> * * <p>The weight must range from 1 to 1000, and represents a weight for * every pixel in the area. This means that a large metering area with * the same weight as a smaller area will have more effect in the * metering result. Metering areas can partially overlap and the driver * will add the weights in the overlap region.</p> * * <p>A special case of a {@code null} metering area list means the driver * is free to meter as it chooses. For example, the driver may use more * signals to select metering areas and change them dynamically. Apps * can set the metering area list to {@code null} if they want the * driver to completely control metering.</p> * * <p>Metering areas are relative to the current field of view * ({@link #getZoom()}). No matter what the zoom level is, (-1000,-1000) * represents the top of the currently visible camera frame. The * metering area cannot be set to be outside the current field of view, * even when using zoom.</p> * * <p>No matter what the metering areas are, the final exposure is compensated * by {@link #setExposureCompensation(int)}.</p> * * @return a list of current metering areas */
public List<Area> getMeteringAreas() { return splitArea(get(KEY_METERING_AREAS)); }
/** * Sets metering areas. See {@link #getMeteringAreas()} for * documentation. * * @param meteringAreas the metering areas * @see #getMeteringAreas() */
public void setMeteringAreas(List<Area> meteringAreas) { set(KEY_METERING_AREAS, meteringAreas); }
/** * Gets the maximum number of detected faces supported. This is the * maximum length of the list returned from {@link FaceDetectionListener}. * If the return value is 0, face detection of the specified type is not * supported. * * @return the maximum number of detected face supported by the camera. * @see #startFaceDetection() */
public int getMaxNumDetectedFaces() { return getInt(KEY_MAX_NUM_DETECTED_FACES_HW, 0); }
/** * Sets recording mode hint. This tells the camera that the intent of * the application is to record videos {@link * android.media.MediaRecorder#start()}, not to take still pictures * {@link #takePicture(Camera.ShutterCallback, Camera.PictureCallback, * Camera.PictureCallback, Camera.PictureCallback)}. Using this hint can * allow MediaRecorder.start() to start faster or with fewer glitches on * output. This should be called before starting preview for the best * result, but can be changed while the preview is active. The default * value is false. * * The app can still call takePicture() when the hint is true or call * MediaRecorder.start() when the hint is false. But the performance may * be worse. * * @param hint true if the apps intend to record videos using * {@link android.media.MediaRecorder}. */
public void setRecordingHint(boolean hint) { set(KEY_RECORDING_HINT, hint ? TRUE : FALSE); }

/** * <p>Returns true if video snapshot is supported. That is, applications * can call {@link #takePicture(Camera.ShutterCallback, * Camera.PictureCallback, Camera.PictureCallback, * Camera.PictureCallback)} during recording. Applications do not need * to call {@link #startPreview()} after taking a picture. The preview * will still be active. Other than that, taking a picture during * recording is identical to taking a picture normally. All settings and * methods related to takePicture work identically. Ex: * {@link #getPictureSize()}, {@link #getSupportedPictureSizes()}, * {@link #setJpegQuality(int)}, {@link #setRotation(int)}, etc. The * picture will have an EXIF header. {@link #FLASH_MODE_AUTO} and * {@link #FLASH_MODE_ON} also still work, but the video will record the * flash.</p> * * <p>Applications can set the shutter callback to null to avoid the shutter * sound. It is also recommended to set raw picture and post view * callbacks to null to avoid interrupting the preview display.</p> * * <p>Field-of-view of the recorded video may be different from that of the * captured pictures. The maximum size of a video snapshot may be * smaller than that for regular still captures. If the current picture * size is set higher than can be supported by video snapshot, the * picture will be captured at the maximum supported size instead.</p> * * @return true if video snapshot is supported. */
public boolean isVideoSnapshotSupported() { String str = get(KEY_VIDEO_SNAPSHOT_SUPPORTED); return TRUE.equals(str); }

/** * <p>Enables and disables video stabilization. Use * {@link #isVideoStabilizationSupported} to determine if calling this * method is valid.</p> * * <p>Video stabilization reduces the shaking due to the motion of the * camera in both the preview stream and in recorded videos, including * data received from the preview callback. It does not reduce motion * blur in images captured with * {@link Camera#takePicture takePicture}.</p> * * <p>Video stabilization can be enabled and disabled while preview or * recording is active, but toggling it may cause a jump in the video * stream that may be undesirable in a recorded video.</p> * * @param toggle Set to true to enable video stabilization, and false to * disable video stabilization. * @see #isVideoStabilizationSupported() * @see #getVideoStabilization() */
public void setVideoStabilization(boolean toggle) { set(KEY_VIDEO_STABILIZATION, toggle ? TRUE : FALSE); }
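A guarded toggle, shown as a sketch with an illustrative helper name, avoids setting the key on devices that do not support stabilization:

// Sketch: turn video stabilization on only when the device supports it and
// it is not already enabled.
private static void enableVideoStabilization(Camera camera) {
    Camera.Parameters params = camera.getParameters();
    if (params.isVideoStabilizationSupported() && !params.getVideoStabilization()) {
        params.setVideoStabilization(true);
        camera.setParameters(params);
    }
}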
Get the current state of video stabilization. See setVideoStabilization for details of video stabilization.
See Also: isVideoStabilizationSupported(), setVideoStabilization(boolean)
Returns: true if video stabilization is enabled
        /**
         * Get the current state of video stabilization. See
         * {@link #setVideoStabilization} for details of video stabilization.
         *
         * @return true if video stabilization is enabled
         * @see #isVideoStabilizationSupported()
         * @see #setVideoStabilization(boolean)
         */
        public boolean getVideoStabilization() {
            String str = get(KEY_VIDEO_STABILIZATION);
            return TRUE.equals(str);
        }
Returns true if video stabilization is supported. See setVideoStabilization for details of video stabilization.
See Also: setVideoStabilization(boolean), getVideoStabilization()
Returns: true if video stabilization is supported
        /**
         * Returns true if video stabilization is supported. See
         * {@link #setVideoStabilization} for details of video stabilization.
         *
         * @return true if video stabilization is supported
         * @see #setVideoStabilization(boolean)
         * @see #getVideoStabilization()
         */
        public boolean isVideoStabilizationSupported() {
            String str = get(KEY_VIDEO_STABILIZATION_SUPPORTED);
            return TRUE.equals(str);
        }

        // Splits a comma-delimited string into an ArrayList of String.
        // Returns null if the passed string is null.
        private ArrayList<String> split(String str) {
            if (str == null) return null;

            TextUtils.StringSplitter splitter = new TextUtils.SimpleStringSplitter(',');
            splitter.setString(str);
            ArrayList<String> substrings = new ArrayList<String>();
            for (String s : splitter) {
                substrings.add(s);
            }
            return substrings;
        }

        // Splits a comma-delimited string into an ArrayList of Integer.
        // Returns null if the passed string is null or the resulting list is empty.
        private ArrayList<Integer> splitInt(String str) {
            if (str == null) return null;

            TextUtils.StringSplitter splitter = new TextUtils.SimpleStringSplitter(',');
            splitter.setString(str);
            ArrayList<Integer> substrings = new ArrayList<Integer>();
            for (String s : splitter) {
                substrings.add(Integer.parseInt(s));
            }
            if (substrings.size() == 0) return null;
            return substrings;
        }

        // Splits a comma-delimited string of integers into the given int array.
        private void splitInt(String str, int[] output) {
            if (str == null) return;

            TextUtils.StringSplitter splitter = new TextUtils.SimpleStringSplitter(',');
            splitter.setString(str);
            int index = 0;
            for (String s : splitter) {
                output[index++] = Integer.parseInt(s);
            }
        }

        // Splits a comma-delimited string of floats into the given float array.
        private void splitFloat(String str, float[] output) {
            if (str == null) return;

            TextUtils.StringSplitter splitter = new TextUtils.SimpleStringSplitter(',');
            splitter.setString(str);
            int index = 0;
            for (String s : splitter) {
                output[index++] = Float.parseFloat(s);
            }
        }

        // Returns the value of a float parameter.
        private float getFloat(String key, float defaultValue) {
            try {
                return Float.parseFloat(mMap.get(key));
            } catch (NumberFormatException ex) {
                return defaultValue;
            }
        }

        // Returns the value of an integer parameter.
        private int getInt(String key, int defaultValue) {
            try {
                return Integer.parseInt(mMap.get(key));
            } catch (NumberFormatException ex) {
                return defaultValue;
            }
        }

        // Splits a comma-delimited string into an ArrayList of Size.
        // Returns null if the passed string is null or the resulting list is empty.
        private ArrayList<Size> splitSize(String str) {
            if (str == null) return null;

            TextUtils.StringSplitter splitter = new TextUtils.SimpleStringSplitter(',');
            splitter.setString(str);
            ArrayList<Size> sizeList = new ArrayList<Size>();
            for (String s : splitter) {
                Size size = strToSize(s);
                if (size != null) sizeList.add(size);
            }
            if (sizeList.size() == 0) return null;
            return sizeList;
        }

        // Parses a string (e.g. "480x320") into a Size object.
        // Returns null if the passed string is null or not in the expected format.
        private Size strToSize(String str) {
            if (str == null) return null;

            int pos = str.indexOf('x');
            if (pos != -1) {
                String width = str.substring(0, pos);
                String height = str.substring(pos + 1);
                return new Size(Integer.parseInt(width),
                                Integer.parseInt(height));
            }
            Log.e(TAG, "Invalid size parameter string=" + str);
            return null;
        }

        // Splits a comma-delimited string into an ArrayList of int arrays.
        // Example string: "(10000,26623),(10000,30000)". Returns null if the
        // passed string is null, malformed, or yields no ranges.
        private ArrayList<int[]> splitRange(String str) {
            if (str == null || str.charAt(0) != '('
                    || str.charAt(str.length() - 1) != ')') {
                Log.e(TAG, "Invalid range list string=" + str);
                return null;
            }

            ArrayList<int[]> rangeList = new ArrayList<int[]>();
            int endIndex, fromIndex = 1;
            do {
                int[] range = new int[2];
                endIndex = str.indexOf("),(", fromIndex);
                if (endIndex == -1) endIndex = str.length() - 1;
                splitInt(str.substring(fromIndex, endIndex), range);
                rangeList.add(range);
                fromIndex = endIndex + 3;
            } while (endIndex != str.length() - 1);

            if (rangeList.size() == 0) return null;
            return rangeList;
        }

        // Splits a comma-delimited string into an ArrayList of Area objects.
        // Example string: "(-10,-10,0,0,300),(0,0,10,10,700)". Returns null if
        // the passed string is null or malformed, if no areas are parsed, or if
        // the only area is (0,0,0,0,0).
        private ArrayList<Area> splitArea(String str) {
            if (str == null || str.charAt(0) != '('
                    || str.charAt(str.length() - 1) != ')') {
                Log.e(TAG, "Invalid area string=" + str);
                return null;
            }

            ArrayList<Area> result = new ArrayList<Area>();
            int endIndex, fromIndex = 1;
            int[] array = new int[5];
            do {
                endIndex = str.indexOf("),(", fromIndex);
                if (endIndex == -1) endIndex = str.length() - 1;
                splitInt(str.substring(fromIndex, endIndex), array);
                Rect rect = new Rect(array[0], array[1], array[2], array[3]);
                result.add(new Area(rect, array[4]));
                fromIndex = endIndex + 3;
            } while (endIndex != str.length() - 1);

            if (result.size() == 0) return null;

            if (result.size() == 1) {
                Area area = result.get(0);
                Rect rect = area.rect;
                if (rect.left == 0 && rect.top == 0 && rect.right == 0
                        && rect.bottom == 0 && area.weight == 0) {
                    return null;
                }
            }

            return result;
        }

        // Returns true if the two strings are both null or are equal.
        private boolean same(String s1, String s2) {
            if (s1 == null && s2 == null) return true;
            if (s1 != null && s1.equals(s2)) return true;
            return false;
        }
    };
}
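To make the "(min,max),(min,max)" range format that splitRange consumes concrete, here is a hedged, standalone sketch; the class and method names (RangeParseDemo, parseRanges) are illustrative and not part of the Camera API.

import java.util.ArrayList;
import java.util.Arrays;

// Illustrative, standalone demonstration of the range-list string format
// parsed by splitRange above, e.g. "(10000,26623),(10000,30000)".
public class RangeParseDemo {
    // Parses "(min1,max1),(min2,max2),..." into a list of two-element arrays.
    static ArrayList<int[]> parseRanges(String str) {
        ArrayList<int[]> rangeList = new ArrayList<int[]>();
        int endIndex, fromIndex = 1;
        do {
            endIndex = str.indexOf("),(", fromIndex);
            if (endIndex == -1) endIndex = str.length() - 1;
            String[] parts = str.substring(fromIndex, endIndex).split(",");
            rangeList.add(new int[] {
                    Integer.parseInt(parts[0]), Integer.parseInt(parts[1]) });
            fromIndex = endIndex + 3;
        } while (endIndex != str.length() - 1);
        return rangeList;
    }

    public static void main(String[] args) {
        // Prints [10000, 26623] and then [10000, 30000].
        for (int[] range : parseRanges("(10000,26623),(10000,30000)")) {
            System.out.println(Arrays.toString(range));
        }
    }
}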