diff --git a/api/current.txt b/api/current.txt index 533c70f9221c..dbbf5656587b 100755 --- a/api/current.txt +++ b/api/current.txt @@ -16628,6 +16628,8 @@ package android.hardware.camera2 { field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR = 3; // 0x3 field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG = 2; // 0x2 field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG = 1; // 0x1 + field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO = 5; // 0x5 + field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR = 6; // 0x6 field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB = 4; // 0x4 field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB = 0; // 0x0 field public static final int SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME = 1; // 0x1 diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java index 444ca87a68ef..7148b124253e 100644 --- a/core/java/android/hardware/camera2/CameraCharacteristics.java +++ b/core/java/android/hardware/camera2/CameraCharacteristics.java @@ -28,8 +28,8 @@ import android.hardware.camera2.utils.ArrayUtils; import android.hardware.camera2.utils.TypeReference; import android.util.Rational; -import java.util.Arrays; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -2668,7 +2668,8 @@ public final class CameraCharacteristics extends CameraMetadataThe arrangement of color filters on sensor; * represents the colors in the top-left 2x2 section of - * the sensor, in reading order.
+     * the sensor, in reading order, for a Bayer camera, or the
+     * light spectrum it captures for a MONOCHROME camera.
      * Possible values:
      * Optional - This value may be {@code null} on some devices.
      * Full capability -
@@ -2688,6 +2691,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteristics.Key<?>> {
     public static final Key<Integer> SENSOR_INFO_COLOR_FILTER_ARRANGEMENT =
@@ -2919,6 +2924,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteristics.Key<?>> {
      * Some devices may choose to provide a second set of calibration
      * information for improved quality, including
      * {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2} and its corresponding matrices.
+     * Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.
      * Possible values:
+     * Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.
      * Range of valid values:
      * >= 0
      * Optional - This value may be {@code null} on some devices.
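As a usage sketch, a client can detect the new arrangements through the public keys and treat the color-calibration keys as optional on such devices; the helper class below is a hypothetical wrapper, while the keys and constants are the real camera2 API.

    import android.content.Context;
    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraManager;
    import android.hardware.camera2.CameraMetadata;

    // Hypothetical helper class; illustrates querying the new CFA values.
    final class MonoCameraProbe {
        /** Returns true if the sensor reports a MONO or NIR color filter arrangement. */
        static boolean hasMonoOrNirFilter(Context context, String cameraId)
                throws CameraAccessException {
            CameraManager manager =
                    (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
            CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
            Integer cfa = chars.get(CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
            if (cfa == null) {
                return false;
            }
            return cfa == CameraMetadata.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO
                    || cfa == CameraMetadata.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR;
        }
    }

On such a camera, chars.get(CameraCharacteristics.SENSOR_REFERENCE_ILLUMINANT1) and the related color transform keys may return null from Android Q onward, so callers should null-check them even when the device advertises RAW capability.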
@@ -3592,6 +3598,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * layout key (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}), i.e. the
      * nth value given corresponds to the black level offset for the nth
      * color channel listed in the CFA.
+     * For a MONOCHROME camera, all of the 2x2 channels must have the same values.
      * This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is available or the
      * camera device advertises this key via {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.
      * Range of valid values:
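A brief sketch of how this guarantee surfaces in the public API, assuming characteristics (CameraCharacteristics) and result (TotalCaptureResult) were already obtained by the caller:

    // Per-frame dynamic black level, when reported: four floats in CFA layout order.
    float[] dynamicBlack = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL); // may be null

    // Static black level pattern; on a MONOCHROME camera all four offsets are equal.
    BlackLevelPattern pattern =
            characteristics.get(CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
    if (pattern != null) {
        int topLeft = pattern.getOffsetForIndex(/*column=*/ 0, /*row=*/ 0);
        // For MONOCHROME, getOffsetForIndex(1, 0), (0, 1) and (1, 1) return the same value.
    }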
@@ -3852,6 +3859,17 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * As a visualization only, inverting the full-color map to recover an
      * image of a gray wall (using bicubic interpolation for visual quality)
      * as captured by the sensor gives:
      * Image of a uniform white wall (inverse shading map)
+     * For a MONOCHROME camera, all of the 2x2 channels must have the same values. An example
+     * shading map for such a camera is defined as:
+     * android.lens.info.shadingMapSize = [ 4, 3 ]
+     * android.statistics.lensShadingMap =
+     * [ 1.3, 1.3, 1.3, 1.3,  1.2, 1.2, 1.2, 1.2,
+     *     1.1, 1.1, 1.1, 1.1,  1.3, 1.3, 1.3, 1.3,
+     *   1.2, 1.2, 1.2, 1.2,  1.1, 1.1, 1.1, 1.1,
+     *     1.0, 1.0, 1.0, 1.0,  1.2, 1.2, 1.2, 1.2,
+     *   1.3, 1.3, 1.3, 1.3,   1.2, 1.2, 1.2, 1.2,
+     *     1.2, 1.2, 1.2, 1.2,  1.3, 1.3, 1.3, 1.3 ]
+     * 
      * Range of valid values:
      * Each gain factor is >= 1
      * Optional - This value may be {@code null} on some devices.
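As a sketch of the per-channel guarantee stated above (assuming a result from a completed capture):

    LensShadingMap map = result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
    if (map != null) {
        for (int row = 0; row < map.getRowCount(); row++) {
            for (int col = 0; col < map.getColumnCount(); col++) {
                RggbChannelVector gains = map.getGainFactorVector(col, row);
                // On a MONOCHROME camera: gains.getRed() == gains.getGreenEven()
                //                      == gains.getGreenOdd() == gains.getBlue()
            }
        }
    }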
@@ -3894,13 +3912,13 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
      * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
      * The map is assumed to be bilinearly interpolated between the sample points.
-     * The channel order is [R, Geven, Godd, B], where Geven is the green
-     * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+     * For a Bayer camera, the channel order is [R, Geven, Godd, B], where Geven is
+     * the green channel for the even rows of a Bayer pattern, and Godd is the odd rows.
      * The shading map is stored in a fully interleaved format, and its size
      * is provided in the camera static metadata by android.lens.info.shadingMapSize.
      * The shading map will generally have on the order of 30-40 rows and columns,
      * and will be smaller than 64x64.
-     * As an example, given a very small map defined as:
+     * As an example, given a very small map for a Bayer camera defined as:
      * android.lens.info.shadingMapSize = [ 4, 3 ]
      * android.statistics.lensShadingMap =
      * [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
@@ -3920,6 +3938,17 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * image of a gray wall (using bicubic interpolation for visual quality)
      * as captured by the sensor gives:
      * Image of a uniform white wall (inverse shading map)
+     * For a MONOCHROME camera, all of the 2x2 channels must have the same values. An example
+     * shading map for such a camera is defined as:
+     * android.lens.info.shadingMapSize = [ 4, 3 ]
+     * android.statistics.lensShadingMap =
+     * [ 1.3, 1.3, 1.3, 1.3,  1.2, 1.2, 1.2, 1.2,
+     *     1.1, 1.1, 1.1, 1.1,  1.3, 1.3, 1.3, 1.3,
+     *   1.2, 1.2, 1.2, 1.2,  1.1, 1.1, 1.1, 1.1,
+     *     1.0, 1.0, 1.0, 1.0,  1.2, 1.2, 1.2, 1.2,
+     *   1.3, 1.3, 1.3, 1.3,   1.2, 1.2, 1.2, 1.2,
+     *     1.2, 1.2, 1.2, 1.2,  1.3, 1.3, 1.3, 1.3 ]
+     * 
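To make the interleaved layout concrete, a hypothetical indexing helper for the flat android.statistics.lensShadingMap array (channel order [R, Geven, Godd, B], samples stored row-major) would be:

    // columns = android.lens.info.shadingMapSize[0]; channel is 0..3 in [R, Geven, Godd, B] order.
    static float shadingGain(float[] lensShadingMap, int columns, int x, int y, int channel) {
        return lensShadingMap[(y * columns + x) * 4 + channel];
    }
    // With the 4x3 MONOCHROME example above, shadingGain(map, 4, 0, 0, c) == 1.3f for c = 0..3.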
      * Note that the RAW image data might be subject to lens shading
      * correction not reported on this map. Query
      * {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} to see if RAW image data has subject
@@ -4250,8 +4279,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * of points can be less than max (that is, the request doesn't have to
      * always provide a curve with number of points equivalent to
      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).
-     * For devices with MONOCHROME capability, only red channel is used. Green and blue channels
-     * are ignored.
+     * For devices with MONOCHROME capability, all three channels must have the same set of
+     * control points.
      * A few examples, and their corresponding graphical mappings; these
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.
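A hedged usage sketch (requestBuilder is an assumed CaptureRequest.Builder): an application targeting a MONOCHROME device would simply pass the same control points to all three channels:

    // Identity curve as (Pin, Pout) pairs, reused verbatim for red, green and blue.
    float[] linear = { 0.0f, 0.0f, 1.0f, 1.0f };
    TonemapCurve curve = new TonemapCurve(linear, linear, linear);
    requestBuilder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
    requestBuilder.set(CaptureRequest.TONEMAP_CURVE, curve);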
@@ -4314,8 +4343,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * of points can be less than max (that is, the request doesn't have to
      * always provide a curve with number of points equivalent to
      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).
-     * For devices with MONOCHROME capability, only red channel is used. Green and blue channels
-     * are ignored.
+     * For devices with MONOCHROME capability, all three channels must have the same set of
+     * control points.
      * A few examples, and their corresponding graphical mappings; these
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.
diff --git a/core/java/android/hardware/camera2/params/BlackLevelPattern.java b/core/java/android/hardware/camera2/params/BlackLevelPattern.java
index 6d6c094ebf04..283977fecbd5 100644
--- a/core/java/android/hardware/camera2/params/BlackLevelPattern.java
+++ b/core/java/android/hardware/camera2/params/BlackLevelPattern.java
@@ -16,13 +16,17 @@ package android.hardware.camera2.params;
 
-import java.util.Arrays;
-
 import static com.android.internal.util.Preconditions.checkNotNull;
 
+import java.util.Arrays;
+
 /**
  * Immutable class to store a 4-element vector of integers corresponding to a 2x2 pattern
  * of color channel offsets used for the black level offsets of each color channel.
+ *
+ * For a camera device with
+ * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME
+ * MONOCHROME} capability, all 4 elements of the pattern will have the same value.
  */
 public final class BlackLevelPattern {
@@ -133,6 +137,12 @@ public final class BlackLevelPattern {
      * {@link android.hardware.camera2.CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT}).
      *
+     * A {@link
+     * android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME
+     * MONOCHROME} camera only has one channel. As a result, the returned string will contain 4
+     * identical values.
+     *
      * @return string representation of {@link BlackLevelPattern}
      *
      * @see android.hardware.camera2.CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
diff --git a/core/java/android/hardware/camera2/params/TonemapCurve.java b/core/java/android/hardware/camera2/params/TonemapCurve.java
index 71e68a5271c2..90e63556f018 100644
--- a/core/java/android/hardware/camera2/params/TonemapCurve.java
+++ b/core/java/android/hardware/camera2/params/TonemapCurve.java
@@ -34,6 +34,10 @@ import java.util.Arrays;
  * use as the tonemapping/contrast/gamma curve when {@link CaptureRequest#TONEMAP_MODE} is
  * set to {@link CameraMetadata#TONEMAP_MODE_CONTRAST_CURVE}.
  *
+ * For a camera device with
+ * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME
+ * MONOCHROME} capability, all 3 channels will contain the same set of control points.
+ *
  * The total number of points {@code (Pin, Pout)} for each color channel can be no more than
  * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS}.
* diff --git a/core/jni/android_hardware_camera2_DngCreator.cpp b/core/jni/android_hardware_camera2_DngCreator.cpp index c977437f7df2..29051f14c72f 100644 --- a/core/jni/android_hardware_camera2_DngCreator.cpp +++ b/core/jni/android_hardware_camera2_DngCreator.cpp @@ -892,6 +892,13 @@ static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) { cfaOut[3] = 0; break; } + // MONO and NIR are degenerate case of RGGB pattern: only Red channel + // will be used. + case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO: + case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR: { + cfaOut[0] = 0; + break; + } default: { return BAD_VALUE; } @@ -1063,6 +1070,8 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image uint32_t preWidth = 0; uint32_t preHeight = 0; + uint8_t colorFilter = 0; + bool isBayer = true; { // Check dimensions camera_metadata_entry entry = @@ -1083,10 +1092,25 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image "either the preCorrectionActiveArraySize or the pixelArraySize."); return nullptr; } + + camera_metadata_entry colorFilterEntry = + characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT); + colorFilter = colorFilterEntry.data.u8[0]; + camera_metadata_entry capabilitiesEntry = + characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES); + size_t capsCount = capabilitiesEntry.count; + uint8_t* caps = capabilitiesEntry.data.u8; + if (std::find(caps, caps+capsCount, ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) + != caps+capsCount) { + isBayer = false; + } else if (colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO || + colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR) { + jniThrowException(env, "java/lang/AssertionError", + "A camera device with MONO/NIR color filter must have MONOCHROME capability."); + return nullptr; + } } - - writer->addIfd(TIFF_IFD_0); status_t err = OK; @@ -1094,9 +1118,12 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image const uint32_t samplesPerPixel = 1; const uint32_t bitsPerSample = BITS_PER_SAMPLE; - OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_RGGB; + OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_NONE; uint8_t cfaPlaneColor[3] = {0, 1, 2}; - uint8_t cfaEnum = -1; + camera_metadata_entry cfaEntry = + characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT); + BAIL_IF_EMPTY_RET_NULL_SP(cfaEntry, env, TAG_CFAPATTERN, writer); + uint8_t cfaEnum = cfaEntry.data.u8[0]; // TODO: Greensplit. // TODO: Add remaining non-essential tags @@ -1141,12 +1168,20 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image { // Set photometric interpretation - uint16_t interpretation = 32803; // CFA + uint16_t interpretation = isBayer ? 
32803 /* CFA */ : + 34892; /* Linear Raw */; BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1, &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer); } { + uint16_t repeatDim[2] = {2, 2}; + if (!isBayer) { + repeatDim[0] = repeatDim[1] = 1; + } + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim, + TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer); + // Set blacklevel tags, using dynamic black level if available camera_metadata_entry entry = results.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL); @@ -1165,14 +1200,9 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image blackLevelRational[i * 2] = static_cast(entry.data.i32[i]); blackLevelRational[i * 2 + 1] = 1; } - } - BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, 4, blackLevelRational, - TIFF_IFD_0), env, TAG_BLACKLEVEL, writer); - - uint16_t repeatDim[2] = {2, 2}; - BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim, - TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer); + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, repeatDim[0]*repeatDim[1], + blackLevelRational, TIFF_IFD_0), env, TAG_BLACKLEVEL, writer); } { @@ -1189,21 +1219,15 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer); } - { + // All CFA pattern tags are not necessary for monochrome cameras. + if (isBayer) { // Set CFA pattern dimensions uint16_t repeatDim[2] = {2, 2}; BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim, TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer); - } - { // Set CFA pattern - camera_metadata_entry entry = - characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT); - BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_CFAPATTERN, writer); - const int cfaLength = 4; - cfaEnum = entry.data.u8[0]; uint8_t cfa[cfaLength]; if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) { jniThrowExceptionFmt(env, "java/lang/IllegalStateException", @@ -1214,15 +1238,11 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image env, TAG_CFAPATTERN, writer); opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum); - } - { // Set CFA plane color BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor, TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer); - } - { // Set CFA layout uint16_t cfaLayout = 1; BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0), @@ -1442,7 +1462,7 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image } bool singleIlluminant = false; - { + if (isBayer) { // Set calibration illuminants camera_metadata_entry entry1 = characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1); @@ -1464,7 +1484,7 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image } } - { + if (isBayer) { // Set color transforms camera_metadata_entry entry1 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1); @@ -1497,7 +1517,7 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image } } - { + if (isBayer) { // Set calibration transforms camera_metadata_entry entry1 = characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1); @@ -1531,7 +1551,7 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image } } - { + if (isBayer) { // Set forward transforms camera_metadata_entry entry1 = characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1); @@ -1565,7 +1585,7 @@ static sp DngCreator_setup(JNIEnv* 
env, jobject thiz, uint32_t image } } - { + if (isBayer) { // Set camera neutral camera_metadata_entry entry = results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT); @@ -1632,8 +1652,8 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image camera_metadata_entry entry = results.find(ANDROID_SENSOR_NOISE_PROFILE); - const status_t numPlaneColors = 3; - const status_t numCfaChannels = 4; + const status_t numPlaneColors = isBayer ? 3 : 1; + const status_t numCfaChannels = isBayer ? 4 : 1; uint8_t cfaOut[numCfaChannels]; if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) { @@ -1710,42 +1730,44 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image } } + // Hot pixel map is specific to bayer camera per DNG spec. + if (isBayer) { + // Set up bad pixel correction list + camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP); - // Set up bad pixel correction list - camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP); - - if ((entry3.count % 2) != 0) { - ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!", - __FUNCTION__); - jniThrowRuntimeException(env, "failed to add hotpixel map."); - return nullptr; - } - - // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag - std::vector v; - for (size_t i = 0; i < entry3.count; i += 2) { - int32_t x = entry3.data.i32[i]; - int32_t y = entry3.data.i32[i + 1]; - x -= static_cast(xmin); - y -= static_cast(ymin); - if (x < 0 || y < 0 || static_cast(x) >= width || - static_cast(y) >= height) { - continue; - } - v.push_back(x); - v.push_back(y); - } - const uint32_t* badPixels = &v[0]; - uint32_t badPixelCount = v.size(); - - if (badPixelCount > 0) { - err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout); - - if (err != OK) { - ALOGE("%s: Could not add hotpixel map.", __FUNCTION__); + if ((entry3.count % 2) != 0) { + ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!", + __FUNCTION__); jniThrowRuntimeException(env, "failed to add hotpixel map."); return nullptr; } + + // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag + std::vector v; + for (size_t i = 0; i < entry3.count; i += 2) { + int32_t x = entry3.data.i32[i]; + int32_t y = entry3.data.i32[i + 1]; + x -= static_cast(xmin); + y -= static_cast(ymin); + if (x < 0 || y < 0 || static_cast(x) >= width || + static_cast(y) >= height) { + continue; + } + v.push_back(x); + v.push_back(y); + } + const uint32_t* badPixels = &v[0]; + uint32_t badPixelCount = v.size(); + + if (badPixelCount > 0) { + err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout); + + if (err != OK) { + ALOGE("%s: Could not add hotpixel map.", __FUNCTION__); + jniThrowRuntimeException(env, "failed to add hotpixel map."); + return nullptr; + } + } } if (builder.getCount() > 0) { @@ -1960,10 +1982,12 @@ static sp DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t image tagsToMove.add(TAG_BLACKLEVELREPEATDIM); tagsToMove.add(TAG_SAMPLESPERPIXEL); tagsToMove.add(TAG_PLANARCONFIGURATION); - tagsToMove.add(TAG_CFAREPEATPATTERNDIM); - tagsToMove.add(TAG_CFAPATTERN); - tagsToMove.add(TAG_CFAPLANECOLOR); - tagsToMove.add(TAG_CFALAYOUT); + if (isBayer) { + tagsToMove.add(TAG_CFAREPEATPATTERNDIM); + tagsToMove.add(TAG_CFAPATTERN); + tagsToMove.add(TAG_CFAPLANECOLOR); + tagsToMove.add(TAG_CFALAYOUT); + } tagsToMove.add(TAG_XRESOLUTION); tagsToMove.add(TAG_YRESOLUTION); 
tagsToMove.add(TAG_RESOLUTIONUNIT);
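On the Java side, DNG creation for a MONOCHROME sensor is unchanged; only the native tag selection above differs (LinearRaw photometric interpretation, a single black level sample, and no CFA tags). A minimal sketch, assuming characteristics, captureResult, rawImage (a RAW_SENSOR android.media.Image) and dngFile already exist:

    // IOException handling omitted for brevity.
    try (DngCreator dng = new DngCreator(characteristics, captureResult);
            FileOutputStream out = new FileOutputStream(dngFile)) {
        dng.writeImage(out, rawImage); // produces a LinearRaw DNG for MONO/NIR sensors
    }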