Camera: Enhance support for monochrome camera
- Add new Color Filter Array enum.
- Clarify doc for Bayer pattern related metadata.
- Add DngCreator support for monochrome camera raw.

Test: Camera CTS
Test: Capture a monochrome DNG image and inspect with LightRoom
Bug: 70216652
Change-Id: I329f224e3763dd5c777815a3cbb9dd7bd198c038
parent 3c91545b8f
commit a8d36036f2
@@ -16628,6 +16628,8 @@ package android.hardware.camera2 {
     field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR = 3; // 0x3
     field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG = 2; // 0x2
     field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG = 1; // 0x1
+    field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO = 5; // 0x5
+    field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR = 6; // 0x6
     field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB = 4; // 0x4
     field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB = 0; // 0x0
     field public static final int SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME = 1; // 0x1
@@ -28,8 +28,8 @@ import android.hardware.camera2.utils.ArrayUtils;
 import android.hardware.camera2.utils.TypeReference;
 import android.util.Rational;
 
-import java.util.Arrays;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
@@ -2668,7 +2668,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
     /**
      * <p>The arrangement of color filters on sensor;
      * represents the colors in the top-left 2x2 section of
-     * the sensor, in reading order.</p>
+     * the sensor, in reading order, for a Bayer camera, or the
+     * light spectrum it captures for MONOCHROME camera.</p>
      * <p><b>Possible values:</b>
      * <ul>
      * <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB RGGB}</li>
@@ -2676,6 +2677,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG GBRG}</li>
      * <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR BGGR}</li>
      * <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB RGB}</li>
+     * <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO MONO}</li>
+     * <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR NIR}</li>
      * </ul></p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Full capability</b> -
@@ -2688,6 +2691,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG
      * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR
      * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB
+     * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO
+     * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR
      */
     @PublicKey
     public static final Key<Integer> SENSOR_INFO_COLOR_FILTER_ARRANGEMENT =
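
For illustration only (not part of this change), a minimal sketch of how an application might read this key and treat the new values as monochrome; it assumes `characteristics` is the CameraCharacteristics of the camera being queried:

    Integer cfa = characteristics.get(
            CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
    // MONO and NIR both imply a MONOCHROME camera; any other non-null value is a Bayer/RGB layout.
    boolean monochromeCfa = cfa != null
            && (cfa == CameraMetadata.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO
                || cfa == CameraMetadata.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR);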
@@ -2919,6 +2924,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * <p>Some devices may choose to provide a second set of calibration
      * information for improved quality, including
      * {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2} and its corresponding matrices.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Possible values:</b>
      * <ul>
      * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT DAYLIGHT}</li>
@@ -2981,6 +2988,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * <p>If this key is present, then {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM2 android.sensor.colorTransform2},
      * {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM2 android.sensor.calibrationTransform2}, and
      * {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX2 android.sensor.forwardMatrix2} will also be present.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Range of valid values:</b><br>
      * Any value listed in {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1}</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
@@ -3006,6 +3015,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * colorspace) into this camera device's native sensor color
      * space under the first reference illuminant
      * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1}).</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
@@ -3029,6 +3040,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2}).</p>
      * <p>This matrix will only be present if the second reference
      * illuminant is present.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
@@ -3053,6 +3066,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * and the CIE XYZ colorspace when calculating this transform will
      * match the standard white point for the first reference illuminant
      * (i.e. no chromatic adaptation will be applied by this transform).</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
@@ -3079,6 +3094,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * (i.e. no chromatic adaptation will be applied by this transform).</p>
      * <p>This matrix will only be present if the second reference
      * illuminant is present.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
@@ -3101,6 +3118,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * this matrix is chosen so that the standard white point for this reference
      * illuminant in the reference sensor colorspace is mapped to D50 in the
      * CIE XYZ colorspace.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
@@ -3125,6 +3144,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * CIE XYZ colorspace.</p>
      * <p>This matrix will only be present if the second reference
      * illuminant is present.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
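
Because these color-calibration keys disappear for MONOCHROME cameras starting in Q, RAW post-processing code should null-check them rather than assume RAW capability implies their presence. A hedged sketch, assuming `characteristics` has already been obtained:

    Integer illuminant1 =
            characteristics.get(CameraCharacteristics.SENSOR_REFERENCE_ILLUMINANT1);
    ColorSpaceTransform transform1 =
            characteristics.get(CameraCharacteristics.SENSOR_COLOR_TRANSFORM1);
    if (illuminant1 == null || transform1 == null) {
        // MONOCHROME RAW: skip demosaicing and color-matrix math; treat samples as luminance.
    }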
@@ -3153,6 +3174,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * level values. For raw capture in particular, it is recommended to use
      * pixels from {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} to calculate black
      * level values for each frame.</p>
+     * <p>For a MONOCHROME camera device, all of the 2x2 channels must have the same values.</p>
      * <p><b>Range of valid values:</b><br>
      * >= 0 for each.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
@@ -357,7 +357,7 @@ public abstract class CameraDevice implements AutoCloseable {
      * </p>
      *
      * <p>MONOCHROME-capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}
-     * includes {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME MONOCHROME})
+     * includes {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME MONOCHROME}) devices
      * supporting {@link android.graphics.ImageFormat#Y8 Y8} support substituting {@code YUV}
      * streams with {@code Y8} in all guaranteed stream combinations for the device's hardware level
      * and capabilities.</p>
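
As a sketch of the Y8 substitution described above (assumes `characteristics`, `width`, and `height` are available, and a device that actually lists ImageFormat.Y8 among its outputs):

    StreamConfigurationMap configs = characteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    int format = configs.isOutputSupportedFor(ImageFormat.Y8)
            ? ImageFormat.Y8           // monochrome device advertising Y8
            : ImageFormat.YUV_420_888; // fall back to the guaranteed YUV stream
    ImageReader reader = ImageReader.newInstance(width, height, format, /*maxImages=*/ 2);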
@@ -880,11 +880,15 @@ public abstract class CameraMetadata<TKey> {
 
     /**
      * <p>The camera device is a monochrome camera that doesn't contain a color filter array,
-     * and the pixel values on U and V planes are all 128.</p>
+     * and for YUV_420_888 stream, the pixel values on U and V planes are all 128.</p>
      * <p>A MONOCHROME camera must support the guaranteed stream combinations required for
      * its device level and capabilities. Additionally, if the monochrome camera device
      * supports Y8 format, all mandatory stream combination requirements related to {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888} apply
-     * to {@link android.graphics.ImageFormat#Y8 Y8} as well.</p>
+     * to {@link android.graphics.ImageFormat#Y8 Y8} as well. There are no
+     * mandatory stream combination requirements with regard to
+     * {@link android.graphics.ImageFormat#Y8 Y8} for Bayer camera devices.</p>
+     * <p>Starting from Android Q, the SENSOR_INFO_COLOR_FILTER_ARRANGEMENT of a MONOCHROME
+     * camera will be either MONO or NIR.</p>
      * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
      */
     public static final int REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME = 12;
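
Because the U and V planes of a YUV_420_888 frame from a MONOCHROME camera are constant 128, a consumer can read only the luma plane. A rough sketch, assuming `reader` is a YUV_420_888 ImageReader attached to such a device:

    Image image = reader.acquireLatestImage();
    ByteBuffer luma = image.getPlanes()[0].getBuffer(); // plane 0 is Y
    int rowStride = image.getPlanes()[0].getRowStride();
    // Interpret `luma` as an 8-bit grayscale buffer with the given row stride, then:
    image.close();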
@@ -937,6 +941,23 @@ public abstract class CameraMetadata<TKey> {
      */
     public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB = 4;
 
+    /**
+     * <p>Sensor doesn't have any Bayer color filter.
+     * Such sensor captures visible light in monochrome. The exact weighting and
+     * wavelengths captured is not specified, but generally only includes the visible
+     * frequencies. This value implies a MONOCHROME camera.</p>
+     * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+     */
+    public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO = 5;
+
+    /**
+     * <p>Sensor has a near infrared filter capturing light with wavelength between
+     * roughly 750nm and 1400nm, and the same filter covers the whole sensor array. This
+     * value implies a MONOCHROME camera.</p>
+     * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+     */
+    public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR = 6;
+
     //
     // Enumeration values for CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE
     //
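
A short sketch of detecting a monochrome device via the capability list, assuming `characteristics` is available:

    int[] caps = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
    boolean monochrome = false;
    for (int c : caps) {
        if (c == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
            monochrome = true; // CFA arrangement will be MONO or NIR from Q onward
            break;
        }
    }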
@@ -24,8 +24,8 @@ import android.hardware.camera2.impl.PublicKey;
 import android.hardware.camera2.impl.SyntheticKey;
 import android.hardware.camera2.params.OutputConfiguration;
 import android.hardware.camera2.utils.HashCodeHelpers;
-import android.hardware.camera2.utils.TypeReference;
 import android.hardware.camera2.utils.SurfaceUtils;
+import android.hardware.camera2.utils.TypeReference;
 import android.os.Parcel;
 import android.os.Parcelable;
 import android.util.ArraySet;
@@ -2947,8 +2947,8 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
      * of points can be less than max (that is, the request doesn't have to
      * always provide a curve with number of points equivalent to
      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
-     * <p>For devices with MONOCHROME capability, only red channel is used. Green and blue channels
-     * are ignored.</p>
+     * <p>For devices with MONOCHROME capability, all three channels must have the same set of
+     * control points.</p>
      * <p>A few examples, and their corresponding graphical mappings; these
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.</p>
@@ -3011,8 +3011,8 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
      * of points can be less than max (that is, the request doesn't have to
      * always provide a curve with number of points equivalent to
      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
-     * <p>For devices with MONOCHROME capability, only red channel is used. Green and blue channels
-     * are ignored.</p>
+     * <p>For devices with MONOCHROME capability, all three channels must have the same set of
+     * control points.</p>
      * <p>A few examples, and their corresponding graphical mappings; these
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.</p>
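
Under the clarified wording, a request-side sketch supplies the same control points to all three channels on a MONOCHROME device (assumes `builder` is a CaptureRequest.Builder for a device that supports CONTRAST_CURVE):

    float[] curvePoints = { 0.0f, 0.0f, 0.5f, 0.73f, 1.0f, 1.0f }; // (Pin, Pout) pairs
    TonemapCurve curve = new TonemapCurve(curvePoints, curvePoints, curvePoints);
    builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
    builder.set(CaptureRequest.TONEMAP_CURVE, curve);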
@@ -3417,6 +3417,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * used to interpolate between the provided color transforms when
      * processing raw sensor data.</p>
      * <p>The order of the values is R, G, B; where R is in the lowest index.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      */
     @PublicKey
@@ -3442,6 +3444,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * that channel.</p>
      * <p>A more detailed description of the noise model can be found in the
      * Adobe DNG specification for the NoiseProfile tag.</p>
+     * <p>For a MONOCHROME camera, there is only one color channel. So the noise model coefficients
+     * will only contain one S and one O.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      *
      * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
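
A sketch of consuming the single-channel noise profile on a MONOCHROME camera (assumes `result` is a CaptureResult from a RAW-capable capture):

    Pair<Double, Double>[] noiseProfile = result.get(CaptureResult.SENSOR_NOISE_PROFILE);
    if (noiseProfile != null && noiseProfile.length == 1) {
        double s = noiseProfile[0].first;  // signal-dependent coefficient
        double o = noiseProfile[0].second; // signal-independent coefficient
    }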
@@ -3482,6 +3486,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * <li>R > 1.20 will require strong software correction to produce
      * a usuable image (>20% divergence).</li>
      * </ul>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Range of valid values:</b><br></p>
      * <p>>= 0</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
@@ -3592,6 +3598,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * layout key (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}), i.e. the
      * nth value given corresponds to the black level offset for the nth
      * color channel listed in the CFA.</p>
+     * <p>For a MONOCHROME camera, all of the 2x2 channels must have the same values.</p>
      * <p>This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is available or the
      * camera device advertises this key via {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.</p>
      * <p><b>Range of valid values:</b><br>
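
A sketch of reading the per-frame black level; on a MONOCHROME camera all four entries are equal (assumes `result` is a CaptureResult):

    float[] dynamicBlack = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
    if (dynamicBlack != null) {
        float blackLevel = dynamicBlack[0]; // identical to dynamicBlack[1..3] on MONOCHROME
    }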
@@ -3852,6 +3859,17 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * <p>As a visualization only, inverting the full-color map to recover an
      * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
      * <p><img alt="Image of a uniform white wall (inverse shading map)" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+     * <p>For a MONOCHROME camera, all of the 2x2 channels must have the same values. An example
+     * shading map for such a camera is defined as:</p>
+     * <pre><code>android.lens.info.shadingMapSize = [ 4, 3 ]
+     * android.statistics.lensShadingMap =
+     * [ 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2,
+     *     1.1, 1.1, 1.1, 1.1, 1.3, 1.3, 1.3, 1.3,
+     *     1.2, 1.2, 1.2, 1.2, 1.1, 1.1, 1.1, 1.1,
+     *     1.0, 1.0, 1.0, 1.0, 1.2, 1.2, 1.2, 1.2,
+     *     1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2,
+     *     1.2, 1.2, 1.2, 1.2, 1.3, 1.3, 1.3, 1.3 ]
+     * </code></pre>
      * <p><b>Range of valid values:</b><br>
      * Each gain factor is >= 1</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
@@ -3894,13 +3912,13 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
      * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
      * The map is assumed to be bilinearly interpolated between the sample points.</p>
-     * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
-     * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+     * <p>For a Bayer camera, the channel order is [R, Geven, Godd, B], where Geven is
+     * the green channel for the even rows of a Bayer pattern, and Godd is the odd rows.
      * The shading map is stored in a fully interleaved format, and its size
      * is provided in the camera static metadata by android.lens.info.shadingMapSize.</p>
      * <p>The shading map will generally have on the order of 30-40 rows and columns,
      * and will be smaller than 64x64.</p>
-     * <p>As an example, given a very small map defined as:</p>
+     * <p>As an example, given a very small map for a Bayer camera defined as:</p>
      * <pre><code>android.lens.info.shadingMapSize = [ 4, 3 ]
      * android.statistics.lensShadingMap =
      * [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
@@ -3920,6 +3938,17 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * image of a gray wall (using bicubic interpolation for visual quality)
      * as captured by the sensor gives:</p>
      * <p><img alt="Image of a uniform white wall (inverse shading map)" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+     * <p>For a MONOCHROME camera, all of the 2x2 channels must have the same values. An example
+     * shading map for such a camera is defined as:</p>
+     * <pre><code>android.lens.info.shadingMapSize = [ 4, 3 ]
+     * android.statistics.lensShadingMap =
+     * [ 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2,
+     *     1.1, 1.1, 1.1, 1.1, 1.3, 1.3, 1.3, 1.3,
+     *     1.2, 1.2, 1.2, 1.2, 1.1, 1.1, 1.1, 1.1,
+     *     1.0, 1.0, 1.0, 1.0, 1.2, 1.2, 1.2, 1.2,
+     *     1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2,
+     *     1.2, 1.2, 1.2, 1.2, 1.3, 1.3, 1.3, 1.3 ]
+     * </code></pre>
      * <p>Note that the RAW image data might be subject to lens shading
      * correction not reported on this map. Query
      * {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} to see if RAW image data has subject
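
A sketch of sampling the shading map; for a MONOCHROME camera every channel of a sample carries the same gain (assumes `result` is a CaptureResult with shading-map output enabled):

    LensShadingMap shading = result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
    if (shading != null) {
        float red = shading.getGainFactor(RggbChannelVector.RED, /*column=*/ 0, /*row=*/ 0);
        float blue = shading.getGainFactor(RggbChannelVector.BLUE, /*column=*/ 0, /*row=*/ 0);
        // On a MONOCHROME device red == blue (and both greens) at every sample point.
    }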
@@ -4250,8 +4279,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * of points can be less than max (that is, the request doesn't have to
      * always provide a curve with number of points equivalent to
      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
-     * <p>For devices with MONOCHROME capability, only red channel is used. Green and blue channels
-     * are ignored.</p>
+     * <p>For devices with MONOCHROME capability, all three channels must have the same set of
+     * control points.</p>
      * <p>A few examples, and their corresponding graphical mappings; these
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.</p>
@@ -4314,8 +4343,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * of points can be less than max (that is, the request doesn't have to
      * always provide a curve with number of points equivalent to
      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
-     * <p>For devices with MONOCHROME capability, only red channel is used. Green and blue channels
-     * are ignored.</p>
+     * <p>For devices with MONOCHROME capability, all three channels must have the same set of
+     * control points.</p>
      * <p>A few examples, and their corresponding graphical mappings; these
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.</p>
@@ -16,13 +16,17 @@
 
 package android.hardware.camera2.params;
 
-import java.util.Arrays;
-
 import static com.android.internal.util.Preconditions.checkNotNull;
 
+import java.util.Arrays;
+
 /**
  * Immutable class to store a 4-element vector of integers corresponding to a 2x2 pattern
  * of color channel offsets used for the black level offsets of each color channel.
+ *
+ * For a camera device with
+ * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME
+ * MONOCHROME} capability, all 4 elements of the pattern will have the same value.
  */
 public final class BlackLevelPattern {
 
@@ -133,6 +137,12 @@ public final class BlackLevelPattern {
      * {@link android.hardware.camera2.CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT}).
      * </p>
      *
+     * <p>A {@link
+     * android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME
+     * MONOCHROME} camera only has one channel. As a result, the returned string will contain 4
+     * identical values.
+     * </p>
+     *
      * @return string representation of {@link BlackLevelPattern}
      *
      * @see android.hardware.camera2.CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
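
A sketch of reading the static black level pattern; per the note above, a MONOCHROME device reports one value repeated four times (assumes `characteristics` is available):

    BlackLevelPattern pattern =
            characteristics.get(CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
    if (pattern != null) {
        int offset = pattern.getOffsetForIndex(0, 0); // equal to the other three offsets
    }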
@@ -34,6 +34,10 @@ import java.util.Arrays;
  * use as the tonemapping/contrast/gamma curve when {@link CaptureRequest#TONEMAP_MODE} is
  * set to {@link CameraMetadata#TONEMAP_MODE_CONTRAST_CURVE}.</p>
  *
+ * <p>For a camera device with
+ * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME
+ * MONOCHROME} capability, all 3 channels will contain the same set of control points.
+ *
  * <p>The total number of points {@code (Pin, Pout)} for each color channel can be no more than
  * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS}.</p>
  *
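
A sketch of verifying the per-channel curves after capture; with the MONOCHROME wording above, the channels should report identical points (assumes `result` is a CaptureResult taken with CONTRAST_CURVE in effect):

    TonemapCurve applied = result.get(CaptureResult.TONEMAP_CURVE);
    if (applied != null && applied.getPointCount(TonemapCurve.CHANNEL_RED) > 0) {
        PointF red = applied.getPoint(TonemapCurve.CHANNEL_RED, 0);
        PointF blue = applied.getPoint(TonemapCurve.CHANNEL_BLUE, 0);
        // red and blue (and green) match index-for-index on a MONOCHROME device.
    }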
@@ -892,6 +892,13 @@ static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) {
             cfaOut[3] = 0;
             break;
         }
+        // MONO and NIR are degenerate case of RGGB pattern: only Red channel
+        // will be used.
+        case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO:
+        case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR: {
+            cfaOut[0] = 0;
+            break;
+        }
         default: {
             return BAD_VALUE;
         }
@@ -1063,6 +1070,8 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
 
     uint32_t preWidth = 0;
     uint32_t preHeight = 0;
+    uint8_t colorFilter = 0;
+    bool isBayer = true;
     {
         // Check dimensions
         camera_metadata_entry entry =
@@ -1083,10 +1092,25 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
                     "either the preCorrectionActiveArraySize or the pixelArraySize.");
             return nullptr;
         }
+
+        camera_metadata_entry colorFilterEntry =
+                characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
+        colorFilter = colorFilterEntry.data.u8[0];
+        camera_metadata_entry capabilitiesEntry =
+                characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+        size_t capsCount = capabilitiesEntry.count;
+        uint8_t* caps = capabilitiesEntry.data.u8;
+        if (std::find(caps, caps+capsCount, ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME)
+                != caps+capsCount) {
+            isBayer = false;
+        } else if (colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO ||
+                colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR) {
+            jniThrowException(env, "java/lang/AssertionError",
+                    "A camera device with MONO/NIR color filter must have MONOCHROME capability.");
+            return nullptr;
+        }
     }
 
-
     writer->addIfd(TIFF_IFD_0);
 
     status_t err = OK;
@@ -1094,9 +1118,12 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
     const uint32_t samplesPerPixel = 1;
     const uint32_t bitsPerSample = BITS_PER_SAMPLE;
 
-    OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_RGGB;
+    OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_NONE;
     uint8_t cfaPlaneColor[3] = {0, 1, 2};
-    uint8_t cfaEnum = -1;
+    camera_metadata_entry cfaEntry =
+            characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
+    BAIL_IF_EMPTY_RET_NULL_SP(cfaEntry, env, TAG_CFAPATTERN, writer);
+    uint8_t cfaEnum = cfaEntry.data.u8[0];
 
     // TODO: Greensplit.
     // TODO: Add remaining non-essential tags
@@ -1141,12 +1168,20 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
 
     {
         // Set photometric interpretation
-        uint16_t interpretation = 32803; // CFA
+        uint16_t interpretation = isBayer ? 32803 /* CFA */ :
+                34892; /* Linear Raw */;
         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
                 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
     }
 
     {
+        uint16_t repeatDim[2] = {2, 2};
+        if (!isBayer) {
+            repeatDim[0] = repeatDim[1] = 1;
+        }
+        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
+                TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
+
         // Set blacklevel tags, using dynamic black level if available
         camera_metadata_entry entry =
                 results.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
@@ -1165,14 +1200,9 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
                 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.i32[i]);
                 blackLevelRational[i * 2 + 1] = 1;
             }
-
         }
-        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, 4, blackLevelRational,
-                TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
-
-        uint16_t repeatDim[2] = {2, 2};
-        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
-                TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
+        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, repeatDim[0]*repeatDim[1],
+                blackLevelRational, TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
     }
 
     {
@@ -1189,21 +1219,15 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
                 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
     }
 
-    {
+    // All CFA pattern tags are not necessary for monochrome cameras.
+    if (isBayer) {
         // Set CFA pattern dimensions
         uint16_t repeatDim[2] = {2, 2};
         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
                 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
-    }
-
-    {
         // Set CFA pattern
-        camera_metadata_entry entry =
-                characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
-        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_CFAPATTERN, writer);
 
         const int cfaLength = 4;
-        cfaEnum = entry.data.u8[0];
         uint8_t cfa[cfaLength];
         if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
             jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
@@ -1214,15 +1238,11 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
                     env, TAG_CFAPATTERN, writer);
 
         opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
-    }
 
-    {
         // Set CFA plane color
         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
                 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
-    }
 
-    {
         // Set CFA layout
         uint16_t cfaLayout = 1;
         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
@@ -1442,7 +1462,7 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
     }
 
     bool singleIlluminant = false;
-    {
+    if (isBayer) {
         // Set calibration illuminants
         camera_metadata_entry entry1 =
                 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
@@ -1464,7 +1484,7 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
         }
     }
 
-    {
+    if (isBayer) {
         // Set color transforms
         camera_metadata_entry entry1 =
                 characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
@@ -1497,7 +1517,7 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
         }
     }
 
-    {
+    if (isBayer) {
         // Set calibration transforms
         camera_metadata_entry entry1 =
                 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
@@ -1531,7 +1551,7 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
         }
     }
 
-    {
+    if (isBayer) {
         // Set forward transforms
         camera_metadata_entry entry1 =
                 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
@@ -1565,7 +1585,7 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
         }
     }
 
-    {
+    if (isBayer) {
         // Set camera neutral
         camera_metadata_entry entry =
                 results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
@@ -1632,8 +1652,8 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
         camera_metadata_entry entry =
                 results.find(ANDROID_SENSOR_NOISE_PROFILE);
 
-        const status_t numPlaneColors = 3;
-        const status_t numCfaChannels = 4;
+        const status_t numPlaneColors = isBayer ? 3 : 1;
+        const status_t numCfaChannels = isBayer ? 4 : 1;
 
         uint8_t cfaOut[numCfaChannels];
         if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
@@ -1710,42 +1730,44 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
         }
     }
 
-    // Set up bad pixel correction list
-    camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
-
-    if ((entry3.count % 2) != 0) {
-        ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
-                __FUNCTION__);
-        jniThrowRuntimeException(env, "failed to add hotpixel map.");
-        return nullptr;
-    }
-
-    // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag
-    std::vector<uint32_t> v;
-    for (size_t i = 0; i < entry3.count; i += 2) {
-        int32_t x = entry3.data.i32[i];
-        int32_t y = entry3.data.i32[i + 1];
-        x -= static_cast<int32_t>(xmin);
-        y -= static_cast<int32_t>(ymin);
-        if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
-                static_cast<uint32_t>(y) >= height) {
-            continue;
-        }
-        v.push_back(x);
-        v.push_back(y);
-    }
-    const uint32_t* badPixels = &v[0];
-    uint32_t badPixelCount = v.size();
-
-    if (badPixelCount > 0) {
-        err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
-
-        if (err != OK) {
-            ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
+    // Hot pixel map is specific to bayer camera per DNG spec.
+    if (isBayer) {
+        // Set up bad pixel correction list
+        camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
+
+        if ((entry3.count % 2) != 0) {
+            ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
+                    __FUNCTION__);
             jniThrowRuntimeException(env, "failed to add hotpixel map.");
             return nullptr;
         }
+
+        // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag
+        std::vector<uint32_t> v;
+        for (size_t i = 0; i < entry3.count; i += 2) {
+            int32_t x = entry3.data.i32[i];
+            int32_t y = entry3.data.i32[i + 1];
+            x -= static_cast<int32_t>(xmin);
+            y -= static_cast<int32_t>(ymin);
+            if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
+                    static_cast<uint32_t>(y) >= height) {
+                continue;
+            }
+            v.push_back(x);
+            v.push_back(y);
+        }
+        const uint32_t* badPixels = &v[0];
+        uint32_t badPixelCount = v.size();
+
+        if (badPixelCount > 0) {
+            err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
+
+            if (err != OK) {
+                ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
+                jniThrowRuntimeException(env, "failed to add hotpixel map.");
+                return nullptr;
+            }
+        }
     }
 
     if (builder.getCount() > 0) {
@@ -1960,10 +1982,12 @@ static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image
         tagsToMove.add(TAG_BLACKLEVELREPEATDIM);
         tagsToMove.add(TAG_SAMPLESPERPIXEL);
         tagsToMove.add(TAG_PLANARCONFIGURATION);
-        tagsToMove.add(TAG_CFAREPEATPATTERNDIM);
-        tagsToMove.add(TAG_CFAPATTERN);
-        tagsToMove.add(TAG_CFAPLANECOLOR);
-        tagsToMove.add(TAG_CFALAYOUT);
+        if (isBayer) {
+            tagsToMove.add(TAG_CFAREPEATPATTERNDIM);
+            tagsToMove.add(TAG_CFAPATTERN);
+            tagsToMove.add(TAG_CFAPLANECOLOR);
+            tagsToMove.add(TAG_CFALAYOUT);
+        }
         tagsToMove.add(TAG_XRESOLUTION);
         tagsToMove.add(TAG_YRESOLUTION);
         tagsToMove.add(TAG_RESOLUTIONUNIT);
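
On the Java side, writing a DNG from a monochrome RAW_SENSOR capture goes through the same public DngCreator API as for a Bayer sensor; the native changes above emit LinearRaw photometric metadata and omit the CFA-specific tags. A hedged usage sketch, assuming `characteristics`, `captureResult`, an acquired RAW_SENSOR `rawImage`, and a destination `dngFile` exist:

    try (DngCreator dngCreator = new DngCreator(characteristics, captureResult);
         FileOutputStream out = new FileOutputStream(dngFile)) {
        dngCreator.writeImage(out, rawImage); // throws IOException on failure
    }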