am 577d0dac: Merge changes I536021b9,I6eb541c9,I6ac08eca into klp-dev

* commit '577d0dac1d263c6f403f15c587aaf11983e10e44':
  SmartCamera: Fix to build against latest camera2 api
  Camera2Tests: update SmartCamera for async api
  Camera2Tests: Add SmartCamera App
Author: Igor Murashkin, 2013-10-15 17:06:46 -07:00
Committed by: Android Git Automerger

148 changed files with 20302 additions and 0 deletions

View File

@ -0,0 +1,14 @@
# Copyright 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
include $(call all-subdir-makefiles)

View File

@ -0,0 +1,60 @@
Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Smart Camera / Auto Snapshot (formerly named SimpleCamera) ReadMe
Created by: Benjamin W Hendricks
How to build the application:
From the root of the tree, running make SmartCamera builds the apk for the
generic target. To build the application for a specific device instead, run
lunch for that device and then run mm from the SimpleCamera directory.
Then take the reported install path (out/target/.../SmartCamera.apk)
and run adb install out/target/.../SmartCamera.apk. The application should
then appear in the launcher of your device.
You might also need to run adb sync after building to sync the
libsmartcamera_jni library.
Summarized:
make SmartCamera
adb remount
adb sync
adb install -r $ANDROID_PRODUCT_OUT/data/app/SmartCamera.apk
How to run the application:
On a Nexus 7, open up the application from the launcher, and the camera preview
should appear. From there, you can go to the gallery with the gallery button or
press start to start capturing images. You can also change the number of images
to be captured by changing the number on the spinner (between 1 and 10).
What does it do:
The application tries to take good pictures for you automatically while in
start mode. On stop, the application captures whatever images are in the
bottom preview and saves them to the Gallery. It does this by looking at the
following image features:
- Sharpness
- Brightness
- Motion of the device
- Colorfulness
- Contrast
- Exposure (over/under)
By comparing each of these features frame by frame, a score is calculated to
determine whether an image is better or worse than the previous few frames,
and from that score the application separates the good images from the bad ones.
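
As a rough illustration of that idea, the sketch below compares a weighted
per-frame score against the average of the last few frames. The feature
weights, window size and decision rule here are invented for illustration
only; they are not the actual ImageGoodnessFilter logic.

#include <cstddef>
#include <deque>
#include <numeric>

// Hypothetical per-frame features, each assumed normalized to [0, 1].
struct FrameFeatures {
    float sharpness, brightness, motion, colorfulness, contrast, exposure;
};

// Illustrative weighted sum; device motion and poor exposure lower the score.
static float scoreFrame(const FrameFeatures& f) {
    return 0.30f * f.sharpness + 0.15f * f.brightness + 0.15f * f.colorfulness +
           0.15f * f.contrast - 0.15f * f.motion - 0.10f * f.exposure;
}

// A frame counts as "good" if its score beats the average of the previous few.
static bool isGoodFrame(std::deque<float>& history, float score, size_t window = 5) {
    const bool good = history.empty() ||
        score >= std::accumulate(history.begin(), history.end(), 0.0f) / history.size();
    history.push_back(score);
    if (history.size() > window) history.pop_front();
    return good;
}

int main() {
    std::deque<float> history;
    FrameFeatures f = { 0.8f, 0.6f, 0.1f, 0.5f, 0.4f, 0.1f };
    return isGoodFrame(history, scoreFrame(f)) ? 0 : 1;
}
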
What libraries does it use:
- Mobile Filter Framework (MFF)
- Camera2 API
- RenderScript

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>

View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>CameraShoot</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.ApkBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>com.android.ide.eclipse.adt.AndroidNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

View File

@ -0,0 +1,42 @@
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ifneq ($(TARGET_BUILD_JAVA_SUPPORT_LEVEL),)
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE_TAGS := tests
LOCAL_PROGUARD_ENABLED := disabled
# Commented out for now since we need to use some hidden APIs
# LOCAL_SDK_VERSION := current
LOCAL_STATIC_JAVA_LIBRARIES := android-ex-camera2
LOCAL_SRC_FILES := \
$(call all-java-files-under, src) \
$(call all-renderscript-files-under, src)
LOCAL_PACKAGE_NAME := SmartCamera
LOCAL_JNI_SHARED_LIBRARIES := libsmartcamera_jni
include $(BUILD_PACKAGE)
# Include packages in subdirectories
include $(call all-makefiles-under,$(LOCAL_PATH))
endif

View File

@ -0,0 +1,38 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
android:versionCode="1"
android:versionName="1.0"
package="androidx.media.filterfw.samples.simplecamera">
<uses-sdk android:minSdkVersion="18" android:targetSdkVersion="19"/>
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<application android:label="Smart Camera"
android:debuggable="true">
<uses-library android:name="com.google.android.media.effects"
android:required="false" />
<activity android:name=".SmartCamera"
android:label="Smart Camera"
android:screenOrientation="portrait">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

Binary file not shown. (new image, 122 KiB)

Binary file not shown. (new image, 36 KiB)

View File

@ -0,0 +1,49 @@
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
FILTERFW_NATIVE_PATH := $(call my-dir)
#
# Build module libfilterframework
#
LOCAL_PATH := $(FILTERFW_NATIVE_PATH)
include $(CLEAR_VARS)
LOCAL_MODULE_TAGS := tests
LOCAL_SDK_VERSION := 14
LOCAL_MODULE := libsmartcamera_jni
LOCAL_SRC_FILES := contrast.cpp \
brightness.cpp \
exposure.cpp \
colorspace.cpp \
histogram.cpp \
frametovalues.cpp \
pixelutils.cpp \
sobeloperator.cpp \
stats_scorer.cpp
LOCAL_STATIC_LIBRARIES += \
libcutils
LOCAL_C_INCLUDES += \
system/core/include

LOCAL_NDK_STL_VARIANT := stlport_static
include $(BUILD_SHARED_LIBRARY)

View File

@ -0,0 +1,16 @@
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
APP_STL := stlport_static

View File

@ -0,0 +1,52 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native function to extract brightness from image (handed down as ByteBuffer).
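// The result is a perceived-brightness estimate over the frame's average R, G
// and B values: sqrt(0.241 R^2 + 0.691 G^2 + 0.068 B^2) / 255, i.e. normalized
// to [0, 1].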
#include "brightness.h"
#include <math.h>
#include <string.h>
#include <jni.h>
#include <unistd.h>
#include <android/log.h>
jfloat
Java_androidx_media_filterfw_samples_simplecamera_AvgBrightnessFilter_brightnessOperator(
JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer) {
if (imageBuffer == 0) {
return 0.0f;
}
float pixelTotals[] = { 0.0f, 0.0f, 0.0f };
const int numPixels = width * height;
unsigned char* srcPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
for (int i = 0; i < numPixels; i++) {
pixelTotals[0] += *(srcPtr + 4 * i);
pixelTotals[1] += *(srcPtr + 4 * i + 1);
pixelTotals[2] += *(srcPtr + 4 * i + 2);
}
float avgPixels[] = { 0.0f, 0.0f, 0.0f };
avgPixels[0] = pixelTotals[0] / numPixels;
avgPixels[1] = pixelTotals[1] / numPixels;
avgPixels[2] = pixelTotals[2] / numPixels;
float returnValue = sqrt(0.241f * avgPixels[0] * avgPixels[0] +
0.691f * avgPixels[1] * avgPixels[1] +
0.068f * avgPixels[2] * avgPixels[2]);
return returnValue / 255;
}

View File

@ -0,0 +1,36 @@
/* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native function to extract brightness from image (handed down as ByteBuffer).
#ifndef ANDROID_FILTERFW_JNI_BRIGHTNESS_H
#define ANDROID_FILTERFW_JNI_BRIGHTNESS_H
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jfloat JNICALL
Java_androidx_media_filterfw_samples_simplecamera_AvgBrightnessFilter_brightnessOperator(
JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer);
#ifdef __cplusplus
}
#endif
#endif // ANDROID_FILTERFW_JNI_BRIGHTNESS_H

View File

@ -0,0 +1,177 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "colorspace.h"
#include <jni.h>
#include <stdint.h>
typedef uint8_t uint8;
typedef uint32_t uint32;
typedef int32_t int32;
// RGBA helper struct allows access as int and individual channels
// WARNING: int value depends on endianness and should not be used to analyze individual channels.
union Rgba {
uint32 color;
uint8 channel[4];
};
// Channel index constants
static const uint8 kRed = 0;
static const uint8 kGreen = 1;
static const uint8 kBlue = 2;
static const uint8 kAlpha = 3;
// Clamp to range 0-255
static inline uint32 clamp(int32 x) {
return x > 255 ? 255 : (x < 0 ? 0 : x);
}
// Convert YUV to RGBA
// This uses the ITU-R BT.601 coefficients.
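// With u = U - 128 and v = V - 128 (the callers below subtract the bias), the mapping is:
//   R = clamp(Y + 1.402 v)
//   G = clamp(Y - 0.344 u - 0.714 v)
//   B = clamp(Y + 1.772 u)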
static inline Rgba convertYuvToRgba(int32 y, int32 u, int32 v) {
Rgba color;
color.channel[kRed] = clamp(y + static_cast<int>(1.402 * v));
color.channel[kGreen] = clamp(y - static_cast<int>(0.344 * u + 0.714 * v));
color.channel[kBlue] = clamp(y + static_cast<int>(1.772 * u));
color.channel[kAlpha] = 0xFF;
return color;
}
// Colorspace conversion functions /////////////////////////////////////////////////////////////////
void JNI_COLORSPACE_METHOD(nativeYuv420pToRgba8888)(
JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height) {
uint8* const pInput = static_cast<uint8*>(env->GetDirectBufferAddress(input));
Rgba* const pOutput = static_cast<Rgba*>(env->GetDirectBufferAddress(output));
const int size = width * height;
uint8* pInY = pInput;
uint8* pInU = pInput + size;
uint8* pInV = pInput + size + size / 4;
Rgba* pOutColor = pOutput;
const int u_offset = size;
const int v_offset = u_offset + size / 4;
for (int y = 0; y < height; y += 2) {
for (int x = 0; x < width; x += 2) {
int u, v, y1, y2, y3, y4;
y1 = pInY[0];
y2 = pInY[1];
y3 = pInY[width];
y4 = pInY[width + 1];
u = *pInU - 128;
v = *pInV - 128;
pOutColor[0] = convertYuvToRgba(y1, u, v);
pOutColor[1] = convertYuvToRgba(y2, u, v);
pOutColor[width] = convertYuvToRgba(y3, u, v);
pOutColor[width + 1] = convertYuvToRgba(y4, u, v);
pInY += 2;
pInU++;
pInV++;
pOutColor += 2;
}
pInY += width;
pOutColor += width;
}
}
void JNI_COLORSPACE_METHOD(nativeArgb8888ToRgba8888)(
JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height) {
Rgba* pInput = static_cast<Rgba*>(env->GetDirectBufferAddress(input));
Rgba* pOutput = static_cast<Rgba*>(env->GetDirectBufferAddress(output));
for (int i = 0; i < width * height; ++i) {
Rgba color_in = *pInput++;
Rgba& color_out = *pOutput++;
color_out.channel[kRed] = color_in.channel[kGreen];
color_out.channel[kGreen] = color_in.channel[kBlue];
color_out.channel[kBlue] = color_in.channel[kAlpha];
color_out.channel[kAlpha] = color_in.channel[kRed];
}
}
void JNI_COLORSPACE_METHOD(nativeRgba8888ToHsva8888)(
JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height) {
Rgba* pInput = static_cast<Rgba*>(env->GetDirectBufferAddress(input));
Rgba* pOutput = static_cast<Rgba*>(env->GetDirectBufferAddress(output));
int r, g, b, a, h, s, v, c_max, c_min;
float delta;
for (int i = 0; i < width * height; ++i) {
Rgba color_in = *pInput++;
Rgba& color_out = *pOutput++;
r = color_in.channel[kRed];
g = color_in.channel[kGreen];
b = color_in.channel[kBlue];
a = color_in.channel[kAlpha];
if (r > g) {
c_min = (g > b) ? b : g;
c_max = (r > b) ? r : b;
} else {
c_min = (r > b) ? b : r;
c_max = (g > b) ? g : b;
}
delta = c_max - c_min;
float scaler = 255 * 60 / 360.0f;
if (delta == 0.0f) {
h = 0; // Hue is undefined for gray pixels; use 0 and avoid dividing by zero.
} else if (c_max == r) {
h = (g > b) ? static_cast<int>(scaler * (g - b) / delta) :
static_cast<int>(scaler * ((g - b) / delta + 6));
} else if (c_max == g) {
h = static_cast<int>(scaler * ((b - r) / delta + 2));
} else { // c_max == b
h = static_cast<int>(scaler * ((r - g) / delta + 4));
}
s = (delta == 0.0f) ? 0 : static_cast<unsigned char>(delta / c_max * 255);
v = c_max;
color_out.channel[kRed] = h;
color_out.channel[kGreen] = s;
color_out.channel[kBlue] = v;
color_out.channel[kAlpha] = a;
}
}
void JNI_COLORSPACE_METHOD(nativeRgba8888ToYcbcra8888)(
JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height) {
Rgba* pInput = static_cast<Rgba*>(env->GetDirectBufferAddress(input));
Rgba* pOutput = static_cast<Rgba*>(env->GetDirectBufferAddress(output));
int r, g, b;
for (int i = 0; i < width * height; ++i) {
Rgba color_in = *pInput++;
Rgba& color_out = *pOutput++;
r = color_in.channel[kRed];
g = color_in.channel[kGreen];
b = color_in.channel[kBlue];
color_out.channel[kRed] =
static_cast<unsigned char>((65.738 * r + 129.057 * g + 25.064 * b) / 256 + 16);
color_out.channel[kGreen] =
static_cast<unsigned char>((-37.945 * r - 74.494 * g + 112.439 * b) / 256 + 128);
color_out.channel[kBlue] =
static_cast<unsigned char>((112.439 * r - 94.154 * g - 18.285 * b) / 256 + 128);
color_out.channel[kAlpha] = color_in.channel[kAlpha];
}
}

View File

@ -0,0 +1,50 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_FILTERFW_JNI_COLORSPACE_H
#define ANDROID_FILTERFW_JNI_COLORSPACE_H
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif
#define JNI_COLORSPACE_METHOD(METHOD_NAME) \
Java_androidx_media_filterfw_ColorSpace_ ## METHOD_NAME
JNIEXPORT void JNICALL
JNI_COLORSPACE_METHOD(nativeYuv420pToRgba8888)(
JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height);
JNIEXPORT void JNICALL
JNI_COLORSPACE_METHOD(nativeArgb8888ToRgba8888)(
JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height);
JNIEXPORT void JNICALL
JNI_COLORSPACE_METHOD(nativeRgba8888ToHsva8888)(
JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height);
JNIEXPORT void JNICALL
JNI_COLORSPACE_METHOD(nativeRgba8888ToYcbcra8888)(
JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height);
#ifdef __cplusplus
}
#endif
#endif // ANDROID_FILTERFW_JNI_COLORSPACE_H

View File

@ -0,0 +1,51 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native function to extract contrast ratio from image (handed down as ByteBuffer).
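// Contrast is estimated as the standard deviation of the per-pixel luminance
// (0.2126 R + 0.7152 G + 0.0722 B, normalized to [0, 1]) across the frame.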
#include "contrast.h"
#include <math.h>
#include <string.h>
#include <jni.h>
#include <unistd.h>
#include <android/log.h>
jfloat
Java_androidx_media_filterfw_samples_simplecamera_ContrastRatioFilter_contrastOperator(
JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer) {
if (imageBuffer == 0) {
return 0.0f;
}
float total = 0;
const int numPixels = width * height;
unsigned char* srcPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
float* lumArray = new float[numPixels];
for (int i = 0; i < numPixels; i++) {
lumArray[i] = (0.2126f * *(srcPtr + 4 * i) + 0.7152f *
*(srcPtr + 4 * i + 1) + 0.0722f * *(srcPtr + 4 * i + 2)) / 255;
total += lumArray[i];
}
const float avg = total / numPixels;
float sum = 0;
for (int i = 0; i < numPixels; i++) {
sum += (lumArray[i] - avg) * (lumArray[i] - avg);
}
delete[] lumArray;
return ((float) sqrt(sum / numPixels));
}

View File

@ -0,0 +1,36 @@
/* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native function to extract contrast from image (handed down as ByteBuffer).
#ifndef ANDROID_FILTERFW_JNI_CONTRAST_H
#define ANDROID_FILTERFW_JNI_CONTRAST_H
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jfloat JNICALL
Java_androidx_media_filterfw_samples_simplecamera_ContrastRatioFilter_contrastOperator(
JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer);
#ifdef __cplusplus
}
#endif
#endif // ANDROID_FILTERFW_JNI_CONTRAST_H

View File

@ -0,0 +1,70 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native function to extract exposure from image (handed down as ByteBuffer).
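// Each operator returns the fraction of pixels whose luminance
// (0.2126 R + 0.7152 G + 0.0722 B) is within 5 of 255 (over-exposed) or
// within 5 of 0 (under-exposed).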
#include "exposure.h"
#include <math.h>
#include <string.h>
#include <jni.h>
#include <unistd.h>
#include <android/log.h>
jfloat
Java_androidx_media_filterfw_samples_simplecamera_ExposureFilter_overExposureOperator(
JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer) {
if (imageBuffer == 0) {
return 0.0f;
}
const int numPixels = width * height;
unsigned char* srcPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
int output = 0;
float tempLuminance = 0.0f;
for (int i = 0; i < numPixels; i++) {
tempLuminance = (0.2126f * *(srcPtr + 4 * i) +
0.7152f * *(srcPtr + 4 * i + 1) +
0.0722f * *(srcPtr + 4 * i + 2));
if (tempLuminance + 5 >= 255) {
output++;
}
}
return (static_cast<float>(output)) / numPixels;
}
jfloat
Java_androidx_media_filterfw_samples_simplecamera_ExposureFilter_underExposureOperator(
JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer) {
if (imageBuffer == 0) {
return 0.0f;
}
const int numPixels = width * height;
unsigned char* srcPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
int output = 0;
float tempLuminance = 0.0f;
for (int i = 0; i < numPixels; i++) {
tempLuminance = (0.2126f * *(srcPtr + 4 * i) +
0.7152f * *(srcPtr + 4 * i + 1) +
0.0722f * *(srcPtr + 4 * i + 2));
if (tempLuminance - 5 <= 0) {
output++;
}
}
return (static_cast<float>(output)) / numPixels;
}

View File

@ -0,0 +1,39 @@
/* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native function to extract exposure from image (handed down as ByteBuffer).
#ifndef ANDROID_FILTERFW_JNI_EXPOSURE_H
#define ANDROID_FILTERFW_JNI_EXPOSURE_H
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jfloat JNICALL
Java_androidx_media_filterfw_samples_simplecamera_ExposureFilter_underExposureOperator(
JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer);
JNIEXPORT jfloat JNICALL
Java_androidx_media_filterfw_samples_simplecamera_ExposureFilter_overExposureOperator(
JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer);
#ifdef __cplusplus
}
#endif
#endif // ANDROID_FILTERFW_JNI_EXPOSURE_H

View File

@ -0,0 +1,76 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native functions to pack an RGBA frame into either a one channel grayscale
// buffer or a three channel RGB buffer (handed down as ByteBuffers).
#include "frametovalues.h"
#include <string.h>
#include <jni.h>
#include <unistd.h>
#include <android/log.h>
#include "imgprocutil.h"
jboolean Java_androidx_media_filterpacks_image_ToGrayValuesFilter_toGrayValues(
JNIEnv* env, jclass clazz, jobject imageBuffer, jobject grayBuffer )
{
unsigned char* pixelPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
unsigned char* grayPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(grayBuffer));
if (pixelPtr == 0 || grayPtr == 0) {
return JNI_FALSE;
}
int numPixels = env->GetDirectBufferCapacity(imageBuffer) / 4;
// TODO: the current implementation is focused on correctness, not performance.
// If performance becomes an issue, it is better to increment pixelPtr directly.
int disp = 0;
for(int idx = 0; idx < numPixels; idx++, disp+=4) {
int R = *(pixelPtr + disp);
int G = *(pixelPtr + disp + 1);
int B = *(pixelPtr + disp + 2);
int gray = getIntensityFast(R, G, B);
*(grayPtr+idx) = static_cast<unsigned char>(gray);
}
return JNI_TRUE;
}
jboolean Java_androidx_media_filterpacks_image_ToRgbValuesFilter_toRgbValues(
JNIEnv* env, jclass clazz, jobject imageBuffer, jobject rgbBuffer )
{
unsigned char* pixelPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
unsigned char* rgbPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(rgbBuffer));
if (pixelPtr == 0 || rgbPtr == 0) {
return JNI_FALSE;
}
int numPixels = env->GetDirectBufferCapacity(imageBuffer) / 4;
// TODO: this code could be revised to improve performance, as noted in the TODO above.
int pixelDisp = 0;
int rgbDisp = 0;
for(int idx = 0; idx < numPixels; idx++, pixelDisp += 4, rgbDisp += 3) {
for (int c = 0; c < 3; ++c) {
*(rgbPtr + rgbDisp + c) = *(pixelPtr + pixelDisp + c);
}
}
return JNI_TRUE;
}

View File

@ -0,0 +1,42 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native functions to pack an RGBA frame into either a one channel grayscale buffer
// or a three channel RGB buffer.
#ifndef ANDROID_FILTERFW_JNI_TOGRAYVALUES_H
#define ANDROID_FILTERFW_JNI_TOGRAYVALUES_H
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jboolean JNICALL
Java_androidx_media_filterpacks_image_ToGrayValuesFilter_toGrayValues(
JNIEnv* env, jclass clazz, jobject imageBuffer, jobject grayBuffer );
JNIEXPORT jboolean JNICALL
Java_androidx_media_filterpacks_image_ToRgbValuesFilter_toRgbValues(
JNIEnv* env, jclass clazz, jobject imageBuffer, jobject rgbBuffer );
#ifdef __cplusplus
}
#endif
#endif // ANDROID_FILTERFW_JNI_TOGRAYVALUES_H

View File

@ -0,0 +1,128 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native function to extract histogram from image (handed down as ByteBuffer).
#include "histogram.h"
#include <string.h>
#include <jni.h>
#include <unistd.h>
#include <android/log.h>
#include "imgprocutil.h"
inline void addPixelToHistogram(unsigned char*& pImg, int* pHist, int numBins) {
int R = *(pImg++);
int G = *(pImg++);
int B = *(pImg++);
++pImg;
int i = getIntensityFast(R, G, B);
int bin = clamp(0, static_cast<int>(static_cast<float>(i * numBins) / 255.0f), numBins - 1);
++pHist[bin];
}
void Java_androidx_media_filterpacks_histogram_GrayHistogramFilter_extractHistogram(
JNIEnv* env, jclass clazz, jobject imageBuffer, jobject maskBuffer, jobject histogramBuffer )
{
unsigned char* pImg = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
int* pHist = static_cast<int*>(env->GetDirectBufferAddress(histogramBuffer));
int numPixels = env->GetDirectBufferCapacity(imageBuffer) / 4; // 4 bytes per pixel
int numBins = env->GetDirectBufferCapacity(histogramBuffer);
unsigned char* pMask = NULL;
if(maskBuffer != NULL) {
pMask = static_cast<unsigned char*>(env->GetDirectBufferAddress(maskBuffer));
}
for(int i = 0; i < numBins; ++i) pHist[i] = 0;
if(pMask == NULL) {
for( ; numPixels > 0; --numPixels) {
addPixelToHistogram(pImg, pHist, numBins);
}
} else {
for( ; numPixels > 0; --numPixels) {
if(*pMask == 0){
pMask += 4;
pImg += 4; // Note that otherwise addPixelToHistogram advances pImg by 4
continue;
}
pMask += 4;
addPixelToHistogram(pImg, pHist, numBins);
}
}
}
void Java_androidx_media_filterpacks_histogram_ChromaHistogramFilter_extractChromaHistogram(
JNIEnv* env, jclass clazz, jobject imageBuffer, jobject histogramBuffer, jint hBins, jint sBins)
{
unsigned char* pixelIn = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
float* histOut = static_cast<float*>(env->GetDirectBufferAddress(histogramBuffer));
int numPixels = env->GetDirectBufferCapacity(imageBuffer) / 4; // 4 bytes per pixel
for (int i = 0; i < hBins * sBins; ++i) histOut[i] = 0.0f;
int h, s, v;
float hScaler = hBins / 256.0f;
float sScaler = sBins / 256.0f;
for( ; numPixels > 0; --numPixels) {
h = *(pixelIn++);
s = *(pixelIn++);
v = *(pixelIn++);
pixelIn++;
int index = static_cast<int>(s * sScaler) * hBins + static_cast<int>(h * hScaler);
histOut[index] += 1.0f;
}
}
void Java_androidx_media_filterpacks_histogram_NewChromaHistogramFilter_extractChromaHistogram(
JNIEnv* env, jclass clazz, jobject imageBuffer, jobject histogramBuffer,
jint hueBins, jint saturationBins, jint valueBins,
jint saturationThreshold, jint valueThreshold) {
unsigned char* pixelIn = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
float* histOut = static_cast<float*>(env->GetDirectBufferAddress(histogramBuffer));
int numPixels = env->GetDirectBufferCapacity(imageBuffer) / 4; // 4 bytes per pixel
// TODO: add check on the size of histOut
for (int i = 0; i < (hueBins * saturationBins + valueBins); ++i) {
histOut[i] = 0.0f;
}
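// Layout of histOut: the first hueBins * saturationBins entries form a 2-D
// hue x saturation histogram; the final valueBins entries form a separate 1-D
// value histogram used for dark or colorless pixels.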
for( ; numPixels > 0; --numPixels) {
int h = *(pixelIn++);
int s = *(pixelIn++);
int v = *(pixelIn++);
pixelIn++;
// If a pixel is either too dark (value less than valueThreshold) or colorless
// (saturation less than saturationThreshold), it is put in a 1-D value histogram instead.
int index;
if (s > saturationThreshold && v > valueThreshold) {
int sIndex = s * saturationBins / 256;
// Shifting hue index by 0.5 such that peaks of red, yellow, green, cyan, blue, pink
// will be at the center of some bins.
int hIndex = ((h * hueBins + 128) / 256) % hueBins;
index = sIndex * hueBins + hIndex;
} else {
index = hueBins * saturationBins + (v * valueBins / 256);
}
histOut[index] += 1.0f;
}
}

View File

@ -0,0 +1,46 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native function to extract histogram from image (handed down as ByteBuffer).
#ifndef ANDROID_FILTERFW_JNI_HISTOGRAM_H
#define ANDROID_FILTERFW_JNI_HISTOGRAM_H
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT void JNICALL
Java_androidx_media_filterpacks_histogram_GrayHistogramFilter_extractHistogram(
JNIEnv* env, jclass clazz, jobject imageBuffer, jobject maskBuffer, jobject histogramBuffer );
JNIEXPORT void JNICALL
Java_androidx_media_filterpacks_histogram_ChromaHistogramFilter_extractChromaHistogram(
JNIEnv* env, jclass clazz, jobject imageBuffer, jobject histogramBuffer, jint hBins, jint sBins);
JNIEXPORT void JNICALL
Java_androidx_media_filterpacks_histogram_NewChromaHistogramFilter_extractChromaHistogram(
JNIEnv* env, jclass clazz, jobject imageBuffer, jobject histogramBuffer,
jint hueBins, jint saturationBins, jint valueBins,
jint saturationThreshold, jint valueThreshold);
#ifdef __cplusplus
}
#endif
#endif // ANDROID_FILTERFW_JNI_HISTOGRAM_H

View File

@ -0,0 +1,32 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Some native low-level image processing functions.
#ifndef ANDROID_FILTERFW_JNI_IMGPROCUTIL_H
#define ANDROID_FILTERFW_JNI_IMGPROCUTIL_H
inline int getIntensityFast(int R, int G, int B) {
return (R + R + R + B + G + G + G + G) >> 3; // see http://stackoverflow.com/a/596241
}
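// The shift form above uses fixed weights of 3/8, 4/8 and 1/8 for R, G and B,
// an integer-only approximation of the usual luma weights (~0.30, 0.59, 0.11).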
inline int clamp(int min, int val, int max) {
return val < min ? min : (val > max ? max : val);
// Note that for performance reasons, this function does *not* check if min < max!
}
#endif // ANDROID_FILTERFW_JNI_IMGPROCUTIL_H

View File

@ -0,0 +1,38 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "pixelutils.h"
#include <stdint.h>
typedef uint32_t uint32;
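// Copies a width x height block of 32-bit pixels from input to output. Writing
// starts 'offset' pixels into the output buffer; adjacent pixels in a row are
// written 'pixStride' pixels apart and successive rows 'rowStride' pixels apart,
// which allows scattering into interleaved or tiled output layouts.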
void JNI_PIXELUTILS_METHOD(nativeCopyPixels)(
JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height, jint offset,
jint pixStride, jint rowStride) {
uint32* pInPix = static_cast<uint32*>(env->GetDirectBufferAddress(input));
uint32* pOutput = static_cast<uint32*>(env->GetDirectBufferAddress(output));
uint32* pOutRow = pOutput + offset;
for (int y = 0; y < height; ++y) {
uint32* pOutPix = pOutRow;
for (int x = 0; x < width; ++x) {
*pOutPix = *(pInPix++);
pOutPix += pixStride;
}
pOutRow += rowStride;
}
}

View File

@ -0,0 +1,39 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_FILTERFW_JNI_PIXELUTILS_H
#define ANDROID_FILTERFW_JNI_PIXELUTILS_H
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif
#define JNI_PIXELUTILS_METHOD(METHOD_NAME) \
Java_androidx_media_filterfw_PixelUtils_ ## METHOD_NAME
JNIEXPORT void JNICALL
JNI_PIXELUTILS_METHOD(nativeCopyPixels)(
JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height, jint offset,
jint pixStride, jint rowStride);
#ifdef __cplusplus
}
#endif
#endif // ANDROID_FILTERFW_JNI_PIXELUTILS_H

View File

@ -0,0 +1,116 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native implementation of a Sobel edge-detection operator on an image (handed down as ByteBuffer).
#include "sobeloperator.h"
#include <math.h>
#include <string.h>
#include <jni.h>
#include <unistd.h>
#include <android/log.h>
#include "imgprocutil.h"
/*
* Compute the horizontal and vertical Sobel gradients of an RGBA image.
* Parameters:
* dataPtr: pointer to the RGBA input image (4 bytes per pixel).
* width, height: image dimensions in pixels.
* gxPtr, gyPtr: output buffers holding 3 shorts per pixel (one per color channel)
*   that receive the horizontal and vertical gradients; image borders are handled
*   by replicating the edge pixels.
*/
void computeGradient(unsigned char* dataPtr, int width, int height, short* gxPtr, short* gyPtr) {
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
const int left = (j > 0)? -4 : 0;
const int right = (j < width - 1) ? 4 : 0;
const int curr = (i * width + j) * 4;
const int above = (i > 0) ? curr - 4 * width : curr;
const int below = (i < height - 1) ? curr + 4 * width : curr;
const int offset = (i * width + j) * 3;
for (int c = 0; c < 3; c++) {
*(gxPtr + offset + c) =
(*(dataPtr + curr + c + right) - *(dataPtr + curr + c + left)) * 2 +
*(dataPtr + above + c + right) - *(dataPtr + above + c + left) +
*(dataPtr + below + c + right) - *(dataPtr + below + c + left);
*(gyPtr + offset + c) =
(*(dataPtr + c + below) - *(dataPtr + c + above)) * 2 +
*(dataPtr + left + c + below) - *(dataPtr + left + c + above) +
*(dataPtr + right + c + below) - *(dataPtr + right + c + above);
}
}
}
}
jboolean Java_androidx_media_filterpacks_image_SobelFilter_sobelOperator(
JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer,
jobject magBuffer, jobject dirBuffer) {
if (imageBuffer == 0) {
return JNI_FALSE;
}
unsigned char* srcPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
unsigned char* magPtr = (magBuffer == 0) ?
0 : static_cast<unsigned char*>(env->GetDirectBufferAddress(magBuffer));
unsigned char* dirPtr = (dirBuffer == 0) ?
0 : static_cast<unsigned char*>(env->GetDirectBufferAddress(dirBuffer));
int numPixels = width * height;
// TODO: avoid creating and deleting these buffers within this native function.
short* gxPtr = new short[3 * numPixels];
short* gyPtr = new short[3 * numPixels];
computeGradient(srcPtr, width, height, gxPtr, gyPtr);
unsigned char* mag = magPtr;
unsigned char* dir = dirPtr;
for (int i = 0; i < numPixels; ++i) {
for (int c = 0; c < 3; c++) {
int gx = static_cast<int>(*(gxPtr + 3 * i + c) / 8 + 127.5);
int gy = static_cast<int>(*(gyPtr + 3 * i + c) / 8 + 127.5);
// Emulate the arithmetic performed on the GPU.
gx = 2 * gx - 255;
gy = 2 * gy - 255;
if (magPtr != 0) {
double value = sqrt(gx * gx + gy * gy);
*(magPtr + 4 * i + c) = static_cast<unsigned char>(value);
}
if (dirPtr != 0) {
*(dirPtr + 4 * i + c) = static_cast<unsigned char>(
(atan(static_cast<double>(gy)/static_cast<double>(gx)) + 3.14) / 6.28);
}
}
// Set the alpha channel to 1.0 (255).
if (magPtr != 0) {
*(magPtr + 4 * i + 3) = 255;
}
if (dirPtr != 0) {
*(dirPtr + 4 * i + 3) = 255;
}
}
delete[] gxPtr;
delete[] gyPtr;
return JNI_TRUE;
}

View File

@ -0,0 +1,37 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Native function implementing a Sobel edge-detection operator on an image (handed down as ByteBuffer).
#ifndef ANDROID_FILTERFW_JNI_SOBELOPERATOR_H
#define ANDROID_FILTERFW_JNI_SOBELOPERATOR_H
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jboolean JNICALL
Java_androidx_media_filterpacks_image_SobelFilter_sobelOperator(
JNIEnv* env, jclass clazz, jint width, jint height,
jobject imageBuffer, jobject magBuffer, jobject dirBuffer);
#ifdef __cplusplus
}
#endif
#endif // ANDROID_FILTERFW_JNI_SOBELOPERATOR_H

View File

@ -0,0 +1,70 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Stats (mean and stdev) scoring in native code.
#include "stats_scorer.h"
#include <jni.h>
#include <math.h>
void Java_androidx_media_filterpacks_numeric_StatsFilter_score(
JNIEnv* env, jobject thiz, jobject imageBuffer, jfloatArray statsArray)
{
unsigned char* pImg = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
int numPixels = env->GetDirectBufferCapacity(imageBuffer); // 1 byte per pixel
float sum = 0.0;
float sumSquares = 0.0;
for (int i = 0; i < numPixels; ++i) {
float val = static_cast<float>(pImg[i]);
sum += val;
sumSquares += val * val;
}
jfloat result[2];
result[0] = sum / numPixels; // mean
result[1] = sqrt((sumSquares - numPixels * result[0] * result[0]) / (numPixels - 1)); // stdev.
env->SetFloatArrayRegion(statsArray, 0, 2, result);
}
void Java_androidx_media_filterpacks_numeric_StatsFilter_regionscore(
JNIEnv* env, jobject thiz, jobject imageBuffer, jint width, jint height,
jfloat left, jfloat top, jfloat right, jfloat bottom, jfloatArray statsArray)
{
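// left, top, right and bottom are fractional coordinates relative to the frame
// size; they are scaled by width and height to select the pixel region to score.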
unsigned char* pImg = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
int xStart = static_cast<int>(width * left);
int xEnd = static_cast<int>(width * right);
int yStart = static_cast<int>(height * top);
int yEnd = static_cast<int>(height * bottom);
int numPixels = (xEnd - xStart) * (yEnd - yStart);
float sum = 0.0;
float sumSquares = 0.0;
for (int y = yStart; y < yEnd; y++) {
int disp = width * y;
for (int x = xStart; x < xEnd; ++x) {
float val = static_cast<float>(*(pImg + disp + x));
sum += val;
sumSquares += val * val;
}
}
jfloat result[2];
result[0] = sum / numPixels; // mean
result[1] = (numPixels == 1) ?
0 : sqrt((sumSquares - numPixels * result[0] * result[0]) / (numPixels - 1)); // stdev.
env->SetFloatArrayRegion(statsArray, 0, 2, result);
}

View File

@ -0,0 +1,44 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Stats (mean and stdev) scoring in native code.
#ifndef ANDROID_FILTERFW_JNI_STATS_SCORER_H
#define ANDROID_FILTERFW_JNI_STATS_SCORER_H
#include <jni.h>
#define JNI_FES_FUNCTION(name) Java_androidx_media_filterpacks_numeric_StatsFilter_ ## name
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT void JNICALL
JNI_FES_FUNCTION(score)(
JNIEnv* env, jobject thiz, jobject imageBuffer, jfloatArray statsArray);
JNIEXPORT void JNICALL
JNI_FES_FUNCTION(regionscore)(
JNIEnv* env, jobject thiz, jobject imageBuffer, jint width, jint height,
jfloat left, jfloat top, jfloat right, jfloat bottom, jfloatArray statsArray);
#ifdef __cplusplus
}
#endif
#endif // ANDROID_FILTERFW_JNI_STATS_SCORER_H

View File

@ -0,0 +1,20 @@
# To enable ProGuard in your project, edit project.properties
# to define the proguard.config property as described in that file.
#
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in ${sdk.dir}/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the ProGuard
# include property in project.properties.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}

View File

@ -0,0 +1,15 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system edit
# "ant.properties", and override values to adapt the script to your
# project structure.
#
# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
# Project target.
target=android-16
android.library.reference.1=../../filterfw

Binary file not shown. (new image, 580 KiB)

Binary file not shown. (new image, 3.0 KiB)

Binary file not shown. (new image, 2.3 KiB)

Binary file not shown. (new image, 2.3 KiB)

Binary file not shown. (new image, 2.0 KiB)

Binary file not shown. (new image, 1.5 KiB)

Binary file not shown. (new image, 1.9 KiB)

Binary file not shown. (new image, 14 KiB)

Binary file not shown. (new image, 341 KiB)

Binary file not shown. (new image, 148 KiB)

Binary file not shown. (new image, 3.9 KiB)

View File

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<ImageView xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="180px"
android:layout_height="240px"
android:src="@drawable/black_screen"
android:adjustViewBounds="true"
/>

View File

@ -0,0 +1,97 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (C) 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<RelativeLayout android:id="@+id/surfaceViewLayout"
android:layout_width="wrap_content"
android:layout_height="1240px"
android:layout_alignParentTop="true" >
<SurfaceView android:id="@+id/cameraView"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_weight="1.0"
/>
<Button android:id="@+id/startButton"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/startButton"
android:layout_alignParentBottom="true"
android:layout_alignParentLeft="true"
/>
<Button android:id="@+id/galleryOpenButton"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/galleryOpenButton"
android:layout_alignParentBottom="true"
android:layout_alignParentRight="true"
/>
<Spinner android:id="@+id/spinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:entries="@array/number_array"
android:layout_alignParentTop="true"
android:layout_alignParentRight="true"
/>
<TextView android:id="@+id/imagesSavedTextView"
android:layout_height="wrap_content"
android:layout_width="wrap_content"
android:padding="16dip"
android:text="@string/imagesSavedTextView"
android:layout_centerHorizontal="true"
android:layout_alignParentBottom="true"
android:textColor="#FF0000"
android:textSize="20sp"
/>
</RelativeLayout>
<HorizontalScrollView xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/scrollView"
android:layout_width="fill_parent"
android:layout_height="wrap_content" >
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/scrollViewLinearLayout"
android:orientation="horizontal"
android:layout_width="fill_parent"
android:layout_height="320px">
</LinearLayout>
</HorizontalScrollView>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="horizontal"
android:layout_width="fill_parent"
android:layout_height="wrap_content">
<TextView android:id="@+id/goodOrBadTextView"
android:layout_width="wrap_content"
android:layout_height="fill_parent"
android:padding="16dip"
android:text="@string/goodOrBadTextView"
/>
<TextView android:id="@+id/fpsTextView"
android:layout_height="fill_parent"
android:layout_width="wrap_content"
android:padding="16dip"
android:text="@string/fpsTextView"
/>
<TextView android:id="@+id/scoreTextView"
android:layout_height="fill_parent"
android:layout_width="wrap_content"
android:padding="16dip"
android:text="@string/scoreTextView"
/>
</LinearLayout>
</LinearLayout>

View File

@ -0,0 +1,182 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<graph>
<!-- Packages -->
<import package="androidx.media.filterpacks.base"/>
<import package="androidx.media.filterpacks.image"/>
<import package="androidx.media.filterpacks.video"/>
<import package="androidx.media.filterpacks.text" />
<import package="androidx.media.filterpacks.numeric" />
<import package="androidx.media.filterpacks.face" />
<import package="androidx.media.filterpacks.transform" />
<import package="androidx.media.filterpacks.performance" />
<import package="androidx.media.filterfw.samples.simplecamera" />
<import package="androidx.media.filterpacks.histogram" />
<import package="androidx.media.filterpacks.colorspace" />
<import package="androidx.media.filterpacks.sensors" />
<!-- Filters -->
<filter class="ResizeFilter" name="resize" >
<input name="outputWidth" intValue="480" />
<input name="outputHeight" intValue="640" />
</filter>
<filter class="Camera2Source" name="camera"/>
<filter class="BranchFilter" name="mainBranch" />
<filter class="BranchFilter" name="preMainBranch" />
<filter class="BranchFilter" name="featureBranch" />
<filter class="SurfaceHolderTarget" name="camViewTarget"/>
<filter class="ScaleFilter" name="scale" >
<input name="scale" floatValue="0.50"/>
</filter>
<filter class="SobelFilter" name="sobel" />
<filter class="StatsFilter" name="statsFilter" />
<filter class="NormFilter" name="normFilter" />
<filter class="TextViewTarget" name="goodOrBadTextView" />
<filter class="ToGrayValuesFilter" name="sobelConverter" />
<filter class="AverageFilter" name="avgFilter" />
<var name="startCapture" />
<filter class="ImageGoodnessFilter" name="goodnessFilter" >
<input name="capturing" varValue="startCapture" />
</filter>
<filter class="ToStringFilter" name="scoreToString" />
<filter class="TextViewTarget" name="scoreTextView" />
<filter class="ExposureFilter" name="exposure" />
<filter class="TextViewTarget" name="fpsTextView" />
<filter class="ToStringFilter" name="throughputToString" />
<filter class="ContrastRatioFilter" name="contrast" />
<filter class="ScaleFilter" name="secondaryScale" >
<input name="scale" floatValue="0.50"/>
</filter>
<filter class="ThroughputFilter" name="throughput" />
<filter class="NewChromaHistogramFilter" name="histogram" />
<filter class="ColorfulnessFilter" name="colorfulness" />
<filter class="MotionSensorWTime" name="motion" />
<filter class="AvgBrightnessFilter" name="brightness" />
<filter class="RotateFilter" name="rotate" />
<filter class="BrightnessFilter" name="snapBrightness" />
<filter class="WaveTriggerFilter" name="snapEffect" />
<!-- Connections -->
<connect sourceFilter="camera" sourcePort="video"
targetFilter="rotate" targetPort="image" />
<connect sourceFilter="camera" sourcePort="orientation"
targetFilter="rotate" targetPort="rotateAngle" />
<connect sourceFilter="rotate" sourcePort="image"
targetFilter="resize" targetPort="image" />
<connect sourceFilter="resize" sourcePort="image"
targetFilter="preMainBranch" targetPort="input" />
<connect sourceFilter="preMainBranch" sourcePort="toMainBranch"
targetFilter="scale" targetPort="image" />
<connect sourceFilter="scale" sourcePort="image"
targetFilter="mainBranch" targetPort="input" />
<connect sourceFilter="preMainBranch" sourcePort="toGoodnessFilter"
targetFilter="goodnessFilter" targetPort="image" />
<connect sourceFilter="mainBranch" sourcePort="toFeatureBranch"
targetFilter="secondaryScale" targetPort="image" />
<connect sourceFilter="secondaryScale" sourcePort="image"
targetFilter="featureBranch" targetPort="input" />
<connect sourceFilter="featureBranch" sourcePort="toSobel"
targetFilter="sobel" targetPort="image" />
<connect sourceFilter="sobel" sourcePort="magnitude"
targetFilter="sobelConverter" targetPort="image" />
<connect sourceFilter="sobelConverter" sourcePort="image"
targetFilter="statsFilter" targetPort="buffer" />
<connect sourceFilter="statsFilter" sourcePort="mean"
targetFilter="normFilter" targetPort="x" />
<connect sourceFilter="statsFilter" sourcePort="stdev"
targetFilter="normFilter" targetPort="y" />
<connect sourceFilter="normFilter" sourcePort="norm"
targetFilter="avgFilter" targetPort="sharpness" />
<connect sourceFilter="avgFilter" sourcePort="avg"
targetFilter="goodnessFilter" targetPort="sharpness" />
<connect sourceFilter="goodnessFilter" sourcePort="goodOrBadPic"
targetFilter="goodOrBadTextView" targetPort="text" />
<connect sourceFilter="featureBranch" sourcePort="toExposure"
targetFilter="exposure" targetPort="image" />
<connect sourceFilter="exposure" sourcePort="underExposureRating"
targetFilter="goodnessFilter" targetPort="underExposure" />
<connect sourceFilter="exposure" sourcePort="overExposureRating"
targetFilter="goodnessFilter" targetPort="overExposure" />
<connect sourceFilter="goodnessFilter" sourcePort="score"
targetFilter="scoreToString" targetPort="object" />
<connect sourceFilter="scoreToString" sourcePort="string"
targetFilter="scoreTextView" targetPort="text" />
<connect sourceFilter="mainBranch" sourcePort="camView"
targetFilter="throughput" targetPort="frame" />
<connect sourceFilter="throughput" sourcePort="frame"
targetFilter="snapBrightness" targetPort="image" />
<connect sourceFilter="snapEffect" sourcePort="value"
targetFilter="snapBrightness" targetPort="brightness" />
<connect sourceFilter="snapBrightness" sourcePort="image"
targetFilter="camViewTarget" targetPort="image" />
<connect sourceFilter="throughput" sourcePort="throughput"
targetFilter="throughputToString" targetPort="object" />
<connect sourceFilter="throughputToString" sourcePort="string"
targetFilter="fpsTextView" targetPort="text" />
<connect sourceFilter="featureBranch" sourcePort="contrastRatio"
targetFilter="contrast" targetPort="image" />
<connect sourceFilter="contrast" sourcePort="contrastRatingToGoodness"
targetFilter="goodnessFilter" targetPort="contrastRating" />
<connect sourceFilter="mainBranch" sourcePort="colorfulness"
targetFilter="histogram" targetPort="image" />
<connect sourceFilter="histogram" sourcePort="histogram"
targetFilter="colorfulness" targetPort="histogram" />
<connect sourceFilter="colorfulness" sourcePort="score"
targetFilter="goodnessFilter" targetPort="colorfulness" />
<connect sourceFilter="motion" sourcePort="values"
targetFilter="goodnessFilter" targetPort="motionValues" />
<connect sourceFilter="featureBranch" sourcePort="brightness"
targetFilter="brightness" targetPort="image" />
<connect sourceFilter="brightness" sourcePort="brightnessRating"
targetFilter="goodnessFilter" targetPort="brightness" />
</graph>
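
For orientation, the sketch below (plain Java, illustrative only, not part of the SmartCamera sources) shows one way the per-feature ratings wired into "goodnessFilter" above could be combined into a single score. The class name, weights, threshold logic, and exposure handling are hypothetical; the real ImageGoodnessFilter implementation is not shown in this hunk.

// Illustrative only: a hypothetical aggregation of the per-feature ratings that the
// graph above routes into "goodnessFilter". Weights and logic are made up.
public final class ImageQualityScorer {
    // Hypothetical weights; not taken from ImageGoodnessFilter.
    private static final float W_SHARPNESS = 0.4f;
    private static final float W_EXPOSURE = 0.2f;
    private static final float W_CONTRAST = 0.15f;
    private static final float W_COLORFULNESS = 0.15f;
    private static final float W_BRIGHTNESS = 0.1f;

    /** Combines feature ratings (each roughly in [0, 1]) into a single score. */
    public static float score(float sharpness, float underExposure, float overExposure,
            float contrast, float colorfulness, float brightness) {
        float exposure = 1f - Math.max(underExposure, overExposure); // penalize either extreme
        return W_SHARPNESS * sharpness
                + W_EXPOSURE * exposure
                + W_CONTRAST * contrast
                + W_COLORFULNESS * colorfulness
                + W_BRIGHTNESS * brightness;
    }

    /** A frame counts as "good" if it beats the running average of recent scores. */
    public static boolean isGoodFrame(float score, float recentAverage) {
        return score > recentAverage;
    }
}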

View File

@ -0,0 +1,5 @@
<resources>
<style name="AppTheme" parent="android:Theme.Holo.Light" />
</resources>

View File

@ -0,0 +1,5 @@
<resources>
<style name="AppTheme" parent="android:Theme.Holo.Light.DarkActionBar" />
</resources>

View File

@ -0,0 +1,38 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<string name="goodOrBadTextView"> Good/Bad Picture </string>
<string name="fpsTextView"> FPS </string>
<string name="scoreTextView"> Score</string>
<string name="gallery"> Go To Gallery </string>
<string name="camera"> Go To Camera </string>
<string name="startButton" > Start </string>
<string name="imagesSavedTextView" > Images Saved </string>
<string name="galleryOpenButton" > Gallery </string>
<string-array name="number_array">
<item> 1 </item>
<item> 2 </item>
<item> 3 </item>
<item> 4 </item>
<item> 5 </item>
<item> 6 </item>
<item> 7 </item>
<item> 8 </item>
<item> 9 </item>
<item> 10 </item>
</string-array>
</resources>

View File

@ -0,0 +1,5 @@
<resources>
<style name="AppTheme" parent="android:Theme.Light" />
</resources>

View File

@ -0,0 +1,929 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package androidx.media.filterfw;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.os.Build;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.Type;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.Vector;
final class BackingStore {
/** Access mode None: Frame data will not be accessed at all. */
static final int ACCESS_NONE = 0x00;
/** Access mode Bytes: Frame data will be accessed as a ByteBuffer. */
static final int ACCESS_BYTES = 0x01;
/** Access mode Texture: Frame data will be accessed as a TextureSource. */
static final int ACCESS_TEXTURE = 0x02;
/** Access mode RenderTarget: Frame data will be accessed as a RenderTarget. */
static final int ACCESS_RENDERTARGET = 0x04;
/** Access mode Object: Frame data will be accessed as a generic Object. */
static final int ACCESS_OBJECT = 0x08;
/** Access mode Bitmap: Frame data will be accessed as a Bitmap. */
static final int ACCESS_BITMAP = 0x10;
/** Access mode Allocation: Frame data will be accessed as a RenderScript Allocation. */
static final int ACCESS_ALLOCATION = 0x20;
private static final int BACKING_BYTEBUFFER = 1;
private static final int BACKING_TEXTURE = 2;
private static final int BACKING_OBJECT = 3;
private static final int BACKING_BITMAP = 4;
private static final int BACKING_ALLOCATION = 5;
private final FrameType mType;
private int[] mDimensions;
private long mTimestamp = Frame.TIMESTAMP_NOT_SET;
private final FrameManager mFrameManager;
private Vector<Backing> mBackings = new Vector<Backing>();
private boolean mWriteLocked = false;
private int mReadLocks = 0;
private int mRefCount = 1;
/** The most up-to-date data backing */
private Backing mCurrentBacking = null;
/** The currently locked backing */
private Backing mLockedBacking = null;
// Public Methods //////////////////////////////////////////////////////////////////////////////
public BackingStore(FrameType type, int[] dimensions, FrameManager frameManager) {
mType = type;
mDimensions = dimensions != null ? Arrays.copyOf(dimensions, dimensions.length) : null;
mFrameManager = frameManager;
}
public FrameType getFrameType() {
return mType;
}
public Object lockData(int mode, int accessFormat) {
return lockBacking(mode, accessFormat).lock(accessFormat);
}
public Backing lockBacking(int mode, int access) {
Backing backing = fetchBacking(mode, access);
if (backing == null) {
throw new RuntimeException("Could not fetch frame data!");
}
lock(backing, mode);
return backing;
}
public boolean unlock() {
if (mWriteLocked) {
mWriteLocked = false;
} else if (mReadLocks > 0) {
--mReadLocks;
} else {
return false;
}
mLockedBacking.unlock();
mLockedBacking = null;
return true;
}
public BackingStore retain() {
if (mRefCount >= 10) {
Log.w("BackingStore", "High ref-count of " + mRefCount + " on " + this + "!");
}
if (mRefCount <= 0) {
throw new RuntimeException("RETAINING RELEASED");
}
++mRefCount;
return this;
}
public BackingStore release() {
if (mRefCount <= 0) {
throw new RuntimeException("DOUBLE-RELEASE");
}
--mRefCount;
if (mRefCount == 0) {
releaseBackings();
return null;
}
return this;
}
/**
* Resizes the backing store. This invalidates all data in the store.
*/
public void resize(int[] newDimensions) {
Vector<Backing> resized = new Vector<Backing>();
for (Backing backing : mBackings) {
if (backing.resize(newDimensions)) {
resized.add(backing);
} else {
releaseBacking(backing);
}
}
mBackings = resized;
mDimensions = newDimensions;
}
public int[] getDimensions() {
return mDimensions;
}
public int getElementCount() {
int result = 1;
if (mDimensions != null) {
for (int dim : mDimensions) {
result *= dim;
}
}
return result;
}
public void importStore(BackingStore store) {
// TODO: Better backing selection?
if (store.mBackings.size() > 0) {
importBacking(store.mBackings.firstElement());
}
mTimestamp = store.mTimestamp;
}
/**
* @return the timestamp
*/
public long getTimestamp() {
return mTimestamp;
}
/**
* @param timestamp the timestamp to set
*/
public void setTimestamp(long timestamp) {
mTimestamp = timestamp;
}
// Internal Methods ////////////////////////////////////////////////////////////////////////////
private Backing fetchBacking(int mode, int access) {
Backing backing = getBacking(mode, access);
if (backing == null) {
backing = attachNewBacking(mode, access);
}
syncBacking(backing);
return backing;
}
private void syncBacking(Backing backing) {
if (backing != null && backing.isDirty() && mCurrentBacking != null) {
backing.syncTo(mCurrentBacking);
}
}
private Backing getBacking(int mode, int access) {
// [Non-iterator looping]
for (int i = 0; i < mBackings.size(); ++i) {
final Backing backing = mBackings.get(i);
int backingAccess =
(mode == Frame.MODE_WRITE) ? backing.writeAccess() : backing.readAccess();
if ((backingAccess & access) == access) {
return backing;
}
}
return null;
}
private Backing attachNewBacking(int mode, int access) {
Backing backing = createBacking(mode, access);
if (mBackings.size() > 0) {
backing.markDirty();
}
mBackings.add(backing);
return backing;
}
private Backing createBacking(int mode, int access) {
// TODO: If the read/write access flags indicate, make/fetch a GraphicBuffer backing.
Backing backing = null;
int elemSize = mType.getElementSize();
if (shouldFetchCached(access)) {
backing = mFrameManager.fetchBacking(mode, access, mDimensions, elemSize);
}
if (backing == null) {
switch (access) {
case ACCESS_BYTES:
backing = new ByteBufferBacking();
break;
case ACCESS_TEXTURE:
case ACCESS_RENDERTARGET:
backing = new TextureBacking();
break;
case ACCESS_OBJECT:
backing = new ObjectBacking();
break;
case ACCESS_BITMAP:
backing = new BitmapBacking();
break;
case ACCESS_ALLOCATION:
if (!AllocationBacking.isSupported()) {
throw new RuntimeException(
"Attempted to create an AllocationBacking in context that does " +
"not support RenderScript!");
}
backing = new AllocationBacking(mFrameManager.getContext().getRenderScript());
break;
}
if (backing == null) {
throw new RuntimeException(
"Could not create backing for access type " + access + "!");
}
if (backing.requiresGpu() && !mFrameManager.getRunner().isOpenGLSupported()) {
throw new RuntimeException(
"Cannot create backing that requires GPU in a runner that does not " +
"support OpenGL!");
}
backing.setDimensions(mDimensions);
backing.setElementSize(elemSize);
backing.setElementId(mType.getElementId());
backing.allocate(mType);
mFrameManager.onBackingCreated(backing);
}
return backing;
}
private void importBacking(Backing backing) {
// TODO: This actually needs synchronization between the two BackingStore threads for the
// general case
int access = backing.requiresGpu() ? ACCESS_BYTES : backing.readAccess();
Backing newBacking = createBacking(Frame.MODE_READ, access);
newBacking.syncTo(backing);
mBackings.add(newBacking);
mCurrentBacking = newBacking;
}
private void releaseBackings() {
// [Non-iterator looping]
for (int i = 0; i < mBackings.size(); ++i) {
releaseBacking(mBackings.get(i));
}
mBackings.clear();
mCurrentBacking = null;
}
private void releaseBacking(Backing backing) {
mFrameManager.onBackingAvailable(backing);
}
private void lock(Backing backingToLock, int mode) {
if (mode == Frame.MODE_WRITE) {
// Make sure frame is not read-locked
if (mReadLocks > 0) {
throw new RuntimeException(
"Attempting to write-lock the read-locked frame " + this + "!");
} else if (mWriteLocked) {
throw new RuntimeException(
"Attempting to write-lock the write-locked frame " + this + "!");
}
// Mark all other backings dirty
// [Non-iterator looping]
for (int i = 0; i < mBackings.size(); ++i) {
final Backing backing = mBackings.get(i);
if (backing != backingToLock) {
backing.markDirty();
}
}
mWriteLocked = true;
mCurrentBacking = backingToLock;
} else {
if (mWriteLocked) {
throw new RuntimeException("Attempting to read-lock locked frame " + this + "!");
}
++mReadLocks;
}
mLockedBacking = backingToLock;
}
private static boolean shouldFetchCached(int access) {
return access != ACCESS_OBJECT;
}
// Backings ////////////////////////////////////////////////////////////////////////////////////
static abstract class Backing {
protected int[] mDimensions = null;
private int mElementSize;
private int mElementID;
protected boolean mIsDirty = false;
int cachePriority = 0;
public abstract void allocate(FrameType frameType);
public abstract int readAccess();
public abstract int writeAccess();
public abstract void syncTo(Backing backing);
public abstract Object lock(int accessType);
public abstract int getType();
public abstract boolean shouldCache();
public abstract boolean requiresGpu();
public abstract void destroy();
public abstract int getSize();
public void unlock() {
// Default implementation does nothing.
}
public void setData(Object data) {
throw new RuntimeException("Internal error: Setting data on frame backing " + this
+ ", which does not support setting data directly!");
}
public void setDimensions(int[] dimensions) {
mDimensions = dimensions;
}
public void setElementSize(int elemSize) {
mElementSize = elemSize;
}
public void setElementId(int elemId) {
mElementID = elemId;
}
public int[] getDimensions() {
return mDimensions;
}
public int getElementSize() {
return mElementSize;
}
public int getElementId() {
return mElementID;
}
public boolean resize(int[] newDimensions) {
return false;
}
public void markDirty() {
mIsDirty = true;
}
public boolean isDirty() {
return mIsDirty;
}
protected void assertImageCompatible(FrameType type) {
if (type.getElementId() != FrameType.ELEMENT_RGBA8888) {
throw new RuntimeException("Cannot allocate texture with non-RGBA data type!");
} else if (mDimensions == null || mDimensions.length != 2) {
throw new RuntimeException("Cannot allocate non 2-dimensional texture!");
}
}
}
static class ObjectBacking extends Backing {
private Object mObject = null;
@Override
public void allocate(FrameType frameType) {
mObject = null;
}
@Override
public int readAccess() {
return ACCESS_OBJECT;
}
@Override
public int writeAccess() {
return ACCESS_OBJECT;
}
@Override
public void syncTo(Backing backing) {
switch (backing.getType()) {
case BACKING_OBJECT:
mObject = backing.lock(ACCESS_OBJECT);
backing.unlock();
break;
case BACKING_BITMAP:
mObject = backing.lock(ACCESS_BITMAP);
backing.unlock();
break;
default:
mObject = null;
}
mIsDirty = false;
}
@Override
public Object lock(int accessType) {
return mObject;
}
@Override
public int getType() {
return BACKING_OBJECT;
}
@Override
public boolean shouldCache() {
return false;
}
@Override
public boolean requiresGpu() {
return false;
}
@Override
public void destroy() {
mObject = null;
}
@Override
public int getSize() {
return 0;
}
@Override
public void setData(Object data) {
mObject = data;
}
}
static class BitmapBacking extends Backing {
private Bitmap mBitmap = null;
@Override
public void allocate(FrameType frameType) {
assertImageCompatible(frameType);
}
@Override
public int readAccess() {
return ACCESS_BITMAP;
}
@Override
public int writeAccess() {
return ACCESS_BITMAP;
}
@Override
public void syncTo(Backing backing) {
int access = backing.readAccess();
if ((access & ACCESS_BITMAP) != 0) {
mBitmap = (Bitmap) backing.lock(ACCESS_BITMAP);
} else if ((access & ACCESS_BYTES) != 0) {
createBitmap();
ByteBuffer buffer = (ByteBuffer) backing.lock(ACCESS_BYTES);
mBitmap.copyPixelsFromBuffer(buffer);
buffer.rewind();
} else if ((access & ACCESS_TEXTURE) != 0) {
createBitmap();
RenderTarget renderTarget = (RenderTarget) backing.lock(ACCESS_RENDERTARGET);
mBitmap.copyPixelsFromBuffer(
renderTarget.getPixelData(mDimensions[0], mDimensions[1]));
} else if ((access & ACCESS_ALLOCATION) != 0 && AllocationBacking.isSupported()) {
createBitmap();
syncToAllocationBacking(backing);
} else {
throw new RuntimeException("Cannot sync bitmap backing!");
}
backing.unlock();
mIsDirty = false;
}
@TargetApi(11)
private void syncToAllocationBacking(Backing backing) {
Allocation allocation = (Allocation) backing.lock(ACCESS_ALLOCATION);
allocation.copyTo(mBitmap);
}
@Override
public Object lock(int accessType) {
return mBitmap;
}
@Override
public int getType() {
return BACKING_BITMAP;
}
@Override
public boolean shouldCache() {
return false;
}
@Override
public boolean requiresGpu() {
return false;
}
@Override
public void destroy() {
// As we share the bitmap with other backings (such as object backings), we must not
// recycle it here.
mBitmap = null;
}
@Override
public int getSize() {
return 4 * mDimensions[0] * mDimensions[1];
}
@Override
public void setData(Object data) {
// We can assume that data will always be a Bitmap instance.
mBitmap = (Bitmap) data;
}
private void createBitmap() {
mBitmap = Bitmap.createBitmap(mDimensions[0], mDimensions[1], Bitmap.Config.ARGB_8888);
}
}
static class TextureBacking extends Backing {
private RenderTarget mRenderTarget = null;
private TextureSource mTexture = null;
@Override
public void allocate(FrameType frameType) {
assertImageCompatible(frameType);
mTexture = TextureSource.newTexture();
}
@Override
public int readAccess() {
return ACCESS_TEXTURE;
}
@Override
public int writeAccess() {
return ACCESS_RENDERTARGET;
}
@Override
public void syncTo(Backing backing) {
int access = backing.readAccess();
if ((access & ACCESS_BYTES) != 0) {
ByteBuffer pixels = (ByteBuffer) backing.lock(ACCESS_BYTES);
mTexture.allocateWithPixels(pixels, mDimensions[0], mDimensions[1]);
} else if ((access & ACCESS_BITMAP) != 0) {
Bitmap bitmap = (Bitmap) backing.lock(ACCESS_BITMAP);
mTexture.allocateWithBitmapPixels(bitmap);
} else if ((access & ACCESS_TEXTURE) != 0) {
TextureSource texture = (TextureSource) backing.lock(ACCESS_TEXTURE);
int w = mDimensions[0];
int h = mDimensions[1];
ImageShader.renderTextureToTarget(texture, getRenderTarget(), w, h);
} else if ((access & ACCESS_ALLOCATION) != 0 && AllocationBacking.isSupported()) {
syncToAllocationBacking(backing);
} else {
throw new RuntimeException("Cannot sync texture backing!");
}
backing.unlock();
mIsDirty = false;
}
@TargetApi(11)
private void syncToAllocationBacking(Backing backing) {
Allocation allocation = (Allocation) backing.lock(ACCESS_ALLOCATION);
ByteBuffer pixels = ByteBuffer.allocateDirect(getSize());
allocation.copyTo(pixels.array());
mTexture.allocateWithPixels(pixels, mDimensions[0], mDimensions[1]);
}
@Override
public Object lock(int accessType) {
switch (accessType) {
case ACCESS_TEXTURE:
return getTexture();
case ACCESS_RENDERTARGET:
return getRenderTarget();
default:
throw new RuntimeException("Illegal access to texture!");
}
}
@Override
public int getType() {
return BACKING_TEXTURE;
}
@Override
public boolean shouldCache() {
return true;
}
@Override
public boolean requiresGpu() {
return true;
}
@Override
public void destroy() {
if (mRenderTarget != null) {
mRenderTarget.release();
}
if (mTexture.isAllocated()) {
mTexture.release();
}
}
@Override
public int getSize() {
return 4 * mDimensions[0] * mDimensions[1];
}
private TextureSource getTexture() {
if (!mTexture.isAllocated()) {
mTexture.allocate(mDimensions[0], mDimensions[1]);
}
return mTexture;
}
private RenderTarget getRenderTarget() {
if (mRenderTarget == null) {
int w = mDimensions[0];
int h = mDimensions[1];
mRenderTarget = RenderTarget.currentTarget().forTexture(getTexture(), w, h);
}
return mRenderTarget;
}
}
static class ByteBufferBacking extends Backing {
ByteBuffer mBuffer = null;
@Override
public void allocate(FrameType frameType) {
int size = frameType.getElementSize();
for (int dim : mDimensions) {
size *= dim;
}
mBuffer = ByteBuffer.allocateDirect(size);
}
@Override
public int readAccess() {
return ACCESS_BYTES;
}
@Override
public int writeAccess() {
return ACCESS_BYTES;
}
@Override
public boolean requiresGpu() {
return false;
}
@Override
public void syncTo(Backing backing) {
int access = backing.readAccess();
if ((access & ACCESS_TEXTURE) != 0) {
RenderTarget target = (RenderTarget) backing.lock(ACCESS_RENDERTARGET);
GLToolbox.readTarget(target, mBuffer, mDimensions[0], mDimensions[1]);
} else if ((access & ACCESS_BITMAP) != 0) {
Bitmap bitmap = (Bitmap) backing.lock(ACCESS_BITMAP);
bitmap.copyPixelsToBuffer(mBuffer);
mBuffer.rewind();
} else if ((access & ACCESS_BYTES) != 0) {
ByteBuffer otherBuffer = (ByteBuffer) backing.lock(ACCESS_BYTES);
mBuffer.put(otherBuffer);
otherBuffer.rewind();
} else if ((access & ACCESS_ALLOCATION) != 0 && AllocationBacking.isSupported()) {
syncToAllocationBacking(backing);
} else {
throw new RuntimeException("Cannot sync bytebuffer backing!");
}
backing.unlock();
mBuffer.rewind();
mIsDirty = false;
}
@TargetApi(11)
private void syncToAllocationBacking(Backing backing) {
Allocation allocation = (Allocation) backing.lock(ACCESS_ALLOCATION);
if (getElementId() == FrameType.ELEMENT_RGBA8888) {
byte[] bytes = mBuffer.array();
allocation.copyTo(bytes);
} else if (getElementId() == FrameType.ELEMENT_FLOAT32) {
float[] floats = new float[getSize() / 4];
allocation.copyTo(floats);
mBuffer.asFloatBuffer().put(floats);
} else {
throw new RuntimeException(
"Trying to sync to an allocation with an unsupported element id: "
+ getElementId());
}
}
@Override
public Object lock(int accessType) {
return mBuffer.rewind();
}
@Override
public void unlock() {
mBuffer.rewind();
}
@Override
public int getType() {
return BACKING_BYTEBUFFER;
}
@Override
public boolean shouldCache() {
return true;
}
@Override
public void destroy() {
mBuffer = null;
}
@Override
public int getSize() {
return mBuffer.remaining();
}
}
@TargetApi(11)
static class AllocationBacking extends Backing {
private final RenderScript mRenderScript;
private Allocation mAllocation = null;
public AllocationBacking(RenderScript renderScript) {
mRenderScript = renderScript;
}
@Override
public void allocate(FrameType frameType) {
assertCompatible(frameType);
Element element = null;
switch (frameType.getElementId()) {
case FrameType.ELEMENT_RGBA8888:
element = Element.RGBA_8888(mRenderScript);
break;
case FrameType.ELEMENT_FLOAT32:
element = Element.F32(mRenderScript);
break;
}
Type.Builder imageTypeBuilder = new Type.Builder(mRenderScript, element);
imageTypeBuilder.setX(mDimensions.length >= 1 ? mDimensions[0] : 1);
imageTypeBuilder.setY(mDimensions.length == 2 ? mDimensions[1] : 1);
Type imageType = imageTypeBuilder.create();
mAllocation = Allocation.createTyped(mRenderScript, imageType);
}
@Override
public int readAccess() {
return ACCESS_ALLOCATION;
}
@Override
public int writeAccess() {
return ACCESS_ALLOCATION;
}
@Override
public boolean requiresGpu() {
return false;
}
@Override
public void syncTo(Backing backing) {
int access = backing.readAccess();
if ((access & ACCESS_TEXTURE) != 0) {
RenderTarget target = (RenderTarget) backing.lock(ACCESS_RENDERTARGET);
ByteBuffer pixels = ByteBuffer.allocateDirect(getSize());
GLToolbox.readTarget(target, pixels, mDimensions[0], mDimensions[1]);
mAllocation.copyFrom(pixels.array());
} else if ((access & ACCESS_BITMAP) != 0) {
Bitmap bitmap = (Bitmap) backing.lock(ACCESS_BITMAP);
mAllocation.copyFrom(bitmap);
} else if ((access & ACCESS_BYTES) != 0) {
ByteBuffer buffer = (ByteBuffer) backing.lock(ACCESS_BYTES);
if (buffer.order() != ByteOrder.nativeOrder()) {
throw new RuntimeException(
"Trying to sync to the ByteBufferBacking with non-native byte order!");
}
byte[] bytes;
if (buffer.hasArray()) {
bytes = buffer.array();
} else {
bytes = new byte[getSize()];
buffer.get(bytes);
buffer.rewind();
}
mAllocation.copyFromUnchecked(bytes);
} else {
throw new RuntimeException("Cannot sync allocation backing!");
}
backing.unlock();
mIsDirty = false;
}
@Override
public Object lock(int accessType) {
return mAllocation;
}
@Override
public void unlock() {
}
@Override
public int getType() {
return BACKING_ALLOCATION;
}
@Override
public boolean shouldCache() {
return true;
}
@Override
public void destroy() {
if (mAllocation != null) {
mAllocation.destroy();
mAllocation = null;
}
}
@Override
public int getSize() {
int elementCount = 1;
for (int dim : mDimensions) {
elementCount *= dim;
}
return getElementSize() * elementCount;
}
public static boolean isSupported() {
return Build.VERSION.SDK_INT >= 11;
}
private void assertCompatible(FrameType type) {
// TODO: consider adding support for other data types.
if (type.getElementId() != FrameType.ELEMENT_RGBA8888
&& type.getElementId() != FrameType.ELEMENT_FLOAT32) {
throw new RuntimeException(
"Cannot allocate allocation with a non-RGBA or non-float data type!");
}
if (mDimensions == null || mDimensions.length > 2) {
throw new RuntimeException(
"Cannot create an allocation with more than 2 dimensions!");
}
}
}
}
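
The backing selection above hinges on the bitwise access flags declared at the top of BackingStore. The stand-alone sketch below (class and variable names are illustrative, not part of this file) restates the compatibility test used in getBacking(): a backing can serve a request if its read (or write) access mask contains every bit of the requested mode.

// Illustrative stand-alone version of the access-flag test in BackingStore.getBacking().
public final class AccessFlagsSketch {
    static final int ACCESS_BYTES   = 0x01;
    static final int ACCESS_TEXTURE = 0x02;
    static final int ACCESS_BITMAP  = 0x10;

    // True if backingAccess covers every bit requested in requestedAccess.
    static boolean supports(int backingAccess, int requestedAccess) {
        return (backingAccess & requestedAccess) == requestedAccess;
    }

    public static void main(String[] args) {
        int textureBackingReadAccess = ACCESS_TEXTURE;
        System.out.println(supports(textureBackingReadAccess, ACCESS_TEXTURE)); // true
        System.out.println(supports(textureBackingReadAccess, ACCESS_BYTES));   // false
    }
}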

View File

@ -0,0 +1,88 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.base;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.Frame;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.InputPort;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.OutputPort;
import androidx.media.filterfw.Signature;
public final class BranchFilter extends Filter {
private boolean mSynchronized = true;
public BranchFilter(MffContext context, String name) {
super(context, name);
}
public BranchFilter(MffContext context, String name, boolean synced) {
super(context, name);
mSynchronized = synced;
}
@Override
public Signature getSignature() {
return new Signature()
.addInputPort("input", Signature.PORT_REQUIRED, FrameType.any())
.addInputPort("synchronized", Signature.PORT_OPTIONAL, FrameType.single(boolean.class))
.disallowOtherInputs();
}
@Override
public void onInputPortOpen(InputPort port) {
if (port.getName().equals("input")) {
for (OutputPort outputPort : getConnectedOutputPorts()) {
port.attachToOutputPort(outputPort);
}
} else if (port.getName().equals("synchronized")) {
port.bindToFieldNamed("mSynchronized");
port.setAutoPullEnabled(true);
}
}
@Override
protected void onOpen() {
updateSynchronization();
}
@Override
protected void onProcess() {
Frame inputFrame = getConnectedInputPort("input").pullFrame();
for (OutputPort outputPort : getConnectedOutputPorts()) {
if (outputPort.isAvailable()) {
outputPort.pushFrame(inputFrame);
}
}
}
private void updateSynchronization() {
if (mSynchronized) {
for (OutputPort port : getConnectedOutputPorts()) {
port.setWaitsUntilAvailable(true);
}
} else {
for (OutputPort port : getConnectedOutputPorts()) {
port.setWaitsUntilAvailable(false);
}
}
}
}

View File

@ -0,0 +1,88 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.image;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.FrameImage2D;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.ImageShader;
import androidx.media.filterfw.InputPort;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.OutputPort;
import androidx.media.filterfw.Signature;
public class BrightnessFilter extends Filter {
private float mBrightness = 1.0f;
private ImageShader mShader;
private static final String mBrightnessShader =
"precision mediump float;\n" +
"uniform sampler2D tex_sampler_0;\n" +
"uniform float brightness;\n" +
"varying vec2 v_texcoord;\n" +
"void main() {\n" +
" vec4 color = texture2D(tex_sampler_0, v_texcoord);\n" +
" if (brightness < 0.5) {\n" +
" gl_FragColor = color * (2.0 * brightness);\n" +
" } else {\n" +
" vec4 diff = 1.0 - color;\n" +
" gl_FragColor = color + diff * (2.0 * (brightness - 0.5));\n" +
" }\n" +
"}\n";
public BrightnessFilter(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
return new Signature()
.addInputPort("image", Signature.PORT_REQUIRED, imageIn)
.addInputPort("brightness", Signature.PORT_OPTIONAL, FrameType.single(float.class))
.addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
.disallowOtherPorts();
}
@Override
public void onInputPortOpen(InputPort port) {
if (port.getName().equals("brightness")) {
port.bindToFieldNamed("mBrightness");
port.setAutoPullEnabled(true);
}
}
@Override
protected void onPrepare() {
mShader = new ImageShader(mBrightnessShader);
}
@Override
protected void onProcess() {
OutputPort outPort = getConnectedOutputPort("image");
FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
int[] dim = inputImage.getDimensions();
FrameImage2D outputImage = outPort.fetchAvailableFrame(dim).asFrameImage2D();
mShader.setUniformValue("brightness", mBrightness);
mShader.process(inputImage, outputImage);
outPort.pushFrame(outputImage);
}
}
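
The fragment shader above applies a piecewise curve: below a brightness of 0.5 the color is scaled toward black, above 0.5 it is blended toward white. The CPU-side Java sketch below restates the same per-channel formula; the class and method names are illustrative and not part of this file.

// CPU sketch of the per-channel curve computed by mBrightnessShader above.
// Inputs are normalized to [0, 1]; names are illustrative.
final class BrightnessCurveSketch {
    static float apply(float color, float brightness) {
        if (brightness < 0.5f) {
            return color * (2f * brightness);                   // scale toward black
        }
        float diff = 1f - color;
        return color + diff * (2f * (brightness - 0.5f));       // blend toward white
    }

    public static void main(String[] args) {
        System.out.println(apply(0.5f, 0.75f)); // 0.75: halfway between 0.5 and white
    }
}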

View File

@ -0,0 +1,137 @@
/*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import java.nio.ByteBuffer;
/**
* Utility functions to convert between color-spaces.
*
* Currently these methods are all CPU-based native methods. These could be updated in the future
* to provide other implementations.
*/
public class ColorSpace {
/**
* Convert YUV420-Planar data to RGBA8888.
*
* The input data is expected to be laid out in 3 planes: the width x height Y plane, followed
* by the U and V planes, where each chroma value corresponds to a 2x2 luminance value block.
* YUV to RGB conversion is done using the ITU-R BT.601 transformation. The output buffer must
* be large enough to hold the data, and the dimensions must be multiples of 2.
*
* @param input data encoded in YUV420-Planar.
* @param output buffer to hold RGBA8888 data.
* @param width the width of the image (must be a multiple of 2)
* @param height the height of the image (must be a multiple of 2)
*/
public static void convertYuv420pToRgba8888(
ByteBuffer input, ByteBuffer output, int width, int height) {
expectInputSize(input, (3 * width * height) / 2);
expectOutputSize(output, width * height * 4);
nativeYuv420pToRgba8888(input, output, width, height);
}
/**
* Convert ARGB8888 to RGBA8888.
*
* The input data is expected to be encoded in 8-bit interleaved ARGB channels. The output
* buffer must be large enough to hold the data. The output buffer may be the same as the
* input buffer.
*
* @param input data encoded in ARGB8888.
* @param output buffer to hold RGBA8888 data.
* @param width the width of the image
* @param height the height of the image
*/
public static void convertArgb8888ToRgba8888(
ByteBuffer input, ByteBuffer output, int width, int height) {
expectInputSize(input, width * height * 4);
expectOutputSize(output, width * height * 4);
nativeArgb8888ToRgba8888(input, output, width, height);
}
/**
* Convert RGBA8888 to HSVA8888.
*
* The input data is expected to be encoded in 8-bit interleaved RGBA channels. The output
* buffer must be large enough to hold the data. The output buffer may be the same as the
* input buffer.
*
* @param input data encoded in RGBA8888.
* @param output buffer to hold HSVA8888 data.
* @param width the width of the image
* @param height the height of the image
*/
public static void convertRgba8888ToHsva8888(
ByteBuffer input, ByteBuffer output, int width, int height) {
expectInputSize(input, width * height * 4);
expectOutputSize(output, width * height * 4);
nativeRgba8888ToHsva8888(input, output, width, height);
}
/**
* Convert RGBA8888 to YCbCrA8888.
*
* The input data is expected to be encoded in 8-bit interleaved RGBA channels. The output
* buffer must be large enough to hold the data. The output buffer may be the same as the
* input buffer.
*
* @param input data encoded in RGBA8888.
* @param output buffer to hold YCbCrA8888 data.
* @param width the width of the image
* @param height the height of the image
*/
public static void convertRgba8888ToYcbcra8888(
ByteBuffer input, ByteBuffer output, int width, int height) {
expectInputSize(input, width * height * 4);
expectOutputSize(output, width * height * 4);
nativeRgba8888ToYcbcra8888(input, output, width, height);
}
private static void expectInputSize(ByteBuffer input, int expectedSize) {
if (input.remaining() < expectedSize) {
throw new IllegalArgumentException("Input buffer's size does not fit given width "
+ "and height! Expected: " + expectedSize + ", Got: " + input.remaining()
+ ".");
}
}
private static void expectOutputSize(ByteBuffer output, int expectedSize) {
if (output.remaining() < expectedSize) {
throw new IllegalArgumentException("Output buffer's size does not fit given width "
+ "and height! Expected: " + expectedSize + ", Got: " + output.remaining()
+ ".");
}
}
private static native void nativeYuv420pToRgba8888(
ByteBuffer input, ByteBuffer output, int width, int height);
private static native void nativeArgb8888ToRgba8888(
ByteBuffer input, ByteBuffer output, int width, int height);
private static native void nativeRgba8888ToHsva8888(
ByteBuffer input, ByteBuffer output, int width, int height);
private static native void nativeRgba8888ToYcbcra8888(
ByteBuffer input, ByteBuffer output, int width, int height);
static {
System.loadLibrary("smartcamera_jni");
}
}
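
A minimal usage sketch for the YUV420-planar conversion above. It assumes the caller lives in (or imports) the androidx.media.filterfw package where ColorSpace is defined, and uses buffer sizes that satisfy the expectInputSize/expectOutputSize checks (3*w*h/2 bytes in, 4*w*h bytes out). The class name is illustrative.

import java.nio.ByteBuffer;

// Usage sketch only: converts one YUV420-planar frame to RGBA8888 via ColorSpace above.
final class ColorSpaceUsageSketch {
    static ByteBuffer yuv420pToRgba(ByteBuffer yuvInput, int width, int height) {
        // width and height must be multiples of 2, as documented above.
        ByteBuffer rgbaOutput = ByteBuffer.allocateDirect(4 * width * height);
        ColorSpace.convertYuv420pToRgba8888(yuvInput, rgbaOutput, width, height);
        return rgbaOutput;
    }
}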

View File

@ -0,0 +1,93 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Compute a colorfulness score from a chroma histogram.
package androidx.media.filterpacks.colorspace;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.Frame;
import androidx.media.filterfw.FrameBuffer2D;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.FrameValue;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.OutputPort;
import androidx.media.filterfw.Signature;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
* ColorfulnessFilter takes in a chroma histogram generated by NewChromaHistogramFilter
* and computes the colorfulness based on the entropy in hue space.
*/
public final class ColorfulnessFilter extends Filter {
public ColorfulnessFilter(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
FrameType dataIn = FrameType.buffer2D(FrameType.ELEMENT_FLOAT32);
return new Signature()
.addInputPort("histogram", Signature.PORT_REQUIRED, dataIn)
.addOutputPort("score", Signature.PORT_REQUIRED, FrameType.single(float.class))
.disallowOtherPorts();
}
@Override
protected void onProcess() {
FrameBuffer2D histogramFrame =
getConnectedInputPort("histogram").pullFrame().asFrameBuffer2D();
ByteBuffer byteBuffer = histogramFrame.lockBytes(Frame.MODE_READ);
byteBuffer.order(ByteOrder.nativeOrder());
FloatBuffer histogramBuffer = byteBuffer.asFloatBuffer();
histogramBuffer.rewind();
// Create a hue histogram from hue-saturation histogram
int hueBins = histogramFrame.getWidth();
int saturationBins = histogramFrame.getHeight() - 1;
float[] hueHistogram = new float[hueBins];
float total = 0;
for (int r = 0; r < saturationBins; ++r) {
float weight = (float) Math.pow(2, r);
for (int c = 0; c < hueBins; c++) {
float value = histogramBuffer.get() * weight;
hueHistogram[c] += value;
total += value;
}
}
float colorful = 0f;
for (int c = 0; c < hueBins; ++c) {
float value = hueHistogram[c] / total;
if (value > 0f) {
colorful -= value * ((float) Math.log(value));
}
}
colorful /= Math.log(2);
histogramFrame.unlock();
OutputPort outPort = getConnectedOutputPort("score");
FrameValue frameValue = outPort.fetchAvailableFrame(null).asFrameValue();
frameValue.setValue(colorful);
outPort.pushFrame(frameValue);
}
}
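
The colorfulness score computed above is the Shannon entropy, in bits, of the saturation-weighted hue distribution. The stand-alone sketch below restates just the entropy step with a made-up histogram; the class name and example values are illustrative, not part of this file.

// Stand-alone restatement of the entropy step in ColorfulnessFilter.onProcess().
final class HueEntropySketch {
    static float entropyBits(float[] hueHistogram) {
        float total = 0f;
        for (float v : hueHistogram) {
            total += v;
        }
        float entropy = 0f;
        for (float v : hueHistogram) {
            float p = v / total;
            if (p > 0f) {
                entropy -= p * (float) Math.log(p);   // Shannon entropy in nats
            }
        }
        return entropy / (float) Math.log(2);         // convert nats to bits
    }

    public static void main(String[] args) {
        // A perfectly uniform 4-bin hue distribution has entropy log2(4) = 2 bits.
        System.out.println(entropyBits(new float[] {1f, 1f, 1f, 1f})); // ~2.0
    }
}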

View File

@ -0,0 +1,165 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.transform;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.util.FloatMath;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.FrameImage2D;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.ImageShader;
import androidx.media.filterfw.InputPort;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.OutputPort;
import androidx.media.filterfw.Signature;
import androidx.media.filterfw.geometry.Quad;
public class CropFilter extends Filter {
private Quad mCropRect = Quad.fromRect(0f, 0f, 1f, 1f);
private int mOutputWidth = 0;
private int mOutputHeight = 0;
private ImageShader mShader;
private boolean mUseMipmaps = false;
private FrameImage2D mPow2Frame = null;
public CropFilter(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
return new Signature()
.addInputPort("image", Signature.PORT_REQUIRED, imageIn)
.addInputPort("cropRect", Signature.PORT_REQUIRED, FrameType.single(Quad.class))
.addInputPort("outputWidth", Signature.PORT_OPTIONAL, FrameType.single(int.class))
.addInputPort("outputHeight", Signature.PORT_OPTIONAL, FrameType.single(int.class))
.addInputPort("useMipmaps", Signature.PORT_OPTIONAL, FrameType.single(boolean.class))
.addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
.disallowOtherPorts();
}
@Override
public void onInputPortOpen(InputPort port) {
if (port.getName().equals("cropRect")) {
port.bindToFieldNamed("mCropRect");
port.setAutoPullEnabled(true);
} else if (port.getName().equals("outputWidth")) {
port.bindToFieldNamed("mOutputWidth");
port.setAutoPullEnabled(true);
} else if (port.getName().equals("outputHeight")) {
port.bindToFieldNamed("mOutputHeight");
port.setAutoPullEnabled(true);
} else if (port.getName().equals("useMipmaps")) {
port.bindToFieldNamed("mUseMipmaps");
port.setAutoPullEnabled(true);
}
}
@Override
protected void onPrepare() {
if (isOpenGLSupported()) {
mShader = ImageShader.createIdentity();
}
}
@Override
protected void onProcess() {
OutputPort outPort = getConnectedOutputPort("image");
// Pull input frame
FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
int[] inDims = inputImage.getDimensions();
int[] croppedDims = { (int)FloatMath.ceil(mCropRect.xEdge().length() * inDims[0]),
(int)FloatMath.ceil(mCropRect.yEdge().length() * inDims[1]) };
int[] outDims = { getOutputWidth(croppedDims[0], croppedDims[1]),
getOutputHeight(croppedDims[0], croppedDims[1]) };
FrameImage2D outputImage = outPort.fetchAvailableFrame(outDims).asFrameImage2D();
if (isOpenGLSupported()) {
FrameImage2D sourceFrame;
Quad sourceQuad = null;
boolean scaleDown = (outDims[0] < croppedDims[0]) || (outDims[1] < croppedDims[1]);
if (scaleDown && mUseMipmaps) {
mPow2Frame = TransformUtils.makeMipMappedFrame(mPow2Frame, croppedDims);
int[] extDims = mPow2Frame.getDimensions();
float targetWidth = croppedDims[0] / (float)extDims[0];
float targetHeight = croppedDims[1] / (float)extDims[1];
Quad targetQuad = Quad.fromRect(0f, 0f, targetWidth, targetHeight);
mShader.setSourceQuad(mCropRect);
mShader.setTargetQuad(targetQuad);
mShader.process(inputImage, mPow2Frame);
TransformUtils.generateMipMaps(mPow2Frame);
sourceFrame = mPow2Frame;
sourceQuad = targetQuad;
} else {
sourceFrame = inputImage;
sourceQuad = mCropRect;
}
mShader.setSourceQuad(sourceQuad);
mShader.setTargetRect(0f, 0f, 1f, 1f);
mShader.process(sourceFrame, outputImage);
} else {
// Convert quads to canvas coordinate space
Quad sourceQuad = mCropRect.scale2(inDims[0], inDims[1]);
Quad targetQuad = Quad.fromRect(0f, 0f, inDims[0], inDims[1]);
// Calculate transform for crop
Matrix transform = Quad.getTransform(sourceQuad, targetQuad);
transform.postScale(outDims[0] / (float)inDims[0], outDims[1] / (float)inDims[1]);
// Create target canvas
Bitmap.Config config = Bitmap.Config.ARGB_8888;
Bitmap cropped = Bitmap.createBitmap(outDims[0], outDims[1], config);
Canvas canvas = new Canvas(cropped);
// Draw source bitmap into target canvas
Paint paint = new Paint();
paint.setFilterBitmap(true);
Bitmap sourceBitmap = inputImage.toBitmap();
canvas.drawBitmap(sourceBitmap, transform, paint);
// Assign bitmap to output frame
outputImage.setBitmap(cropped);
}
outPort.pushFrame(outputImage);
}
@Override
protected void onClose() {
if (mPow2Frame != null){
mPow2Frame.release();
mPow2Frame = null;
}
}
protected int getOutputWidth(int inWidth, int inHeight) {
return mOutputWidth <= 0 ? inWidth : mOutputWidth;
}
protected int getOutputHeight(int inWidth, int inHeight) {
return mOutputHeight <= 0 ? inHeight : mOutputHeight;
}
}
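
The size logic above derives the cropped dimensions from the crop quad's edge lengths and falls back to them whenever outputWidth/outputHeight are unset. Below is a small worked example of that arithmetic using plain java.lang.Math; the fractions and dimensions are illustrative stand-ins for mCropRect.xEdge().length(), mCropRect.yEdge().length(), and the incoming frame size.

// Worked example of CropFilter's size math; all values are illustrative.
final class CropSizeSketch {
    public static void main(String[] args) {
        int inWidth = 640, inHeight = 480;
        float cropWidthFraction = 0.5f, cropHeightFraction = 0.5f;          // keep the middle half per axis
        int croppedWidth  = (int) Math.ceil(cropWidthFraction * inWidth);   // 320
        int croppedHeight = (int) Math.ceil(cropHeightFraction * inHeight); // 240
        int outputWidth = 0, outputHeight = 0;       // <= 0 means "fall back to the cropped size"
        int outW = outputWidth  <= 0 ? croppedWidth  : outputWidth;
        int outH = outputHeight <= 0 ? croppedHeight : outputHeight;
        System.out.println(outW + "x" + outH);       // 320x240
    }
}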

View File

@ -0,0 +1,766 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import android.os.SystemClock;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Filters are the processing nodes of the filter graphs.
*
* Filters may have any number of input and output ports, through which the data frames flow.
* TODO: More documentation on filter life-cycle, port and type checking, GL and RenderScript, ...
*/
public abstract class Filter {
private static class State {
private static final int STATE_UNPREPARED = 1;
private static final int STATE_PREPARED = 2;
private static final int STATE_OPEN = 3;
private static final int STATE_CLOSED = 4;
private static final int STATE_DESTROYED = 5;
public int current = STATE_UNPREPARED;
public synchronized boolean check(int state) {
return current == state;
}
}
private final int REQUEST_FLAG_NONE = 0;
private final int REQUEST_FLAG_CLOSE = 1;
private String mName;
private MffContext mContext;
private FilterGraph mFilterGraph;
private State mState = new State();
private int mRequests = REQUEST_FLAG_NONE;
private int mMinimumAvailableInputs = 1;
private int mMinimumAvailableOutputs = 1;
private int mScheduleCount = 0;
private long mLastScheduleTime = 0;
private boolean mIsActive = true;
private AtomicBoolean mIsSleeping = new AtomicBoolean(false);
private long mCurrentTimestamp = Frame.TIMESTAMP_NOT_SET;
private HashMap<String, InputPort> mConnectedInputPorts = new HashMap<String, InputPort>();
private HashMap<String, OutputPort> mConnectedOutputPorts = new HashMap<String, OutputPort>();
private InputPort[] mConnectedInputPortArray = null;
private OutputPort[] mConnectedOutputPortArray = null;
private ArrayList<Frame> mAutoReleaseFrames = new ArrayList<Frame>();
/**
* Constructs a new filter.
* A filter is bound to a specific MffContext. Its name can be any String value, but it must
* be unique within the filter graph.
*
* Note that names starting with "$" are reserved for internal use, and should not be used.
*
* @param context The MffContext in which the filter will live.
* @param name The name of the filter.
*/
protected Filter(MffContext context, String name) {
mName = name;
mContext = context;
}
/**
* Checks whether the filter class is available on this platform.
* Some filters may not be installed on all platforms and can therefore not be instantiated.
* Before instantiating a filter, check if it is available by using this method.
*
* This method uses the shared FilterFactory to check whether the filter class is available.
*
* @param filterClassName The fully qualified class name of the Filter class.
* @return true, if filters of the specified class name are available.
*/
public static final boolean isAvailable(String filterClassName) {
return FilterFactory.sharedFactory().isFilterAvailable(filterClassName);
}
/**
* Returns the name of this filter.
*
* @return the name of the filter (specified during construction).
*/
public String getName() {
return mName;
}
/**
* Returns the signature of this filter.
*
* Subclasses should override this and return their filter signature. The default
* implementation returns a generic signature with no constraints.
*
* This method may be called at any time.
*
* @return the Signature instance for this filter.
*/
public Signature getSignature() {
return new Signature();
}
/**
* Returns the MffContext that the filter resides in.
*
* @return the MffContext of the filter.
*/
public MffContext getContext() {
return mContext;
}
/**
* Returns true, if the filter is active.
* TODO: thread safety?
*
* @return true, if the filter is active.
*/
public boolean isActive() {
return mIsActive;
}
/**
* Activates the current filter.
* Only active filters can be scheduled for execution. This method can only be called if the
* GraphRunner that is executing the filter is stopped or paused.
*/
public void activate() {
assertIsPaused();
if (!mIsActive) {
mIsActive = true;
}
}
/**
* Deactivates the current filter.
* Only active filters can be scheduled for execution. This method can only be called if the
* GraphRunner that is executing the filter is stopped or paused.
*/
public void deactivate() {
// TODO: Support close-on-deactivate (must happen in processing thread).
assertIsPaused();
if (mIsActive) {
mIsActive = false;
}
}
/**
* Returns the filter's set of input ports.
* Note that this contains only the *connected* input ports. To retrieve all
* input ports that this filter accepts, one has to go via the filter's Signature.
*
* @return An array containing all connected input ports.
*/
public final InputPort[] getConnectedInputPorts() {
return mConnectedInputPortArray;
}
/**
* Returns the filter's set of output ports.
* Note that this contains only the *connected* output ports. To retrieve all
* output ports that this filter provides, one has to go via the filter's Signature.
*
* @return An array containing all connected output ports.
*/
public final OutputPort[] getConnectedOutputPorts() {
return mConnectedOutputPortArray;
}
/**
* Returns the input port with the given name.
* Note that this can only access the *connected* input ports. To retrieve all
* input ports that this filter accepts, one has to go via the filter's Signature.
*
* @return the input port with the specified name, or null if no connected input port
* with this name exists.
*/
public final InputPort getConnectedInputPort(String name) {
return mConnectedInputPorts.get(name);
}
/**
* Returns the output port with the given name.
* Note that this can only access the *connected* output ports. To retrieve all
* output ports that this filter provides, one has to go via the filter's Signature.
*
* @return the output port with the specified name, or null if no connected output port
* with this name exists.
*/
public final OutputPort getConnectedOutputPort(String name) {
return mConnectedOutputPorts.get(name);
}
/**
* Called when an input port has been attached in the graph.
* Override this method, in case you want to be informed of any connected input ports, or make
* modifications to them. Note that you may not assume that any other ports have been attached
* already. If you have dependencies on other ports, override
* {@link #onInputPortOpen(InputPort)}. The default implementation does nothing.
*
* @param port The InputPort instance that was attached.
*/
protected void onInputPortAttached(InputPort port) {
}
/**
* Called when an output port has been attached in the graph.
* Override this method, in case you want to be informed of any connected output ports, or make
* modifications to them. Note that you may not assume that any other ports have been attached
* already. If you have dependencies on other ports, override
* {@link #onOutputPortOpen(OutputPort)}. The default implementation does nothing.
*
* @param port The OutputPort instance that was attached.
*/
protected void onOutputPortAttached(OutputPort port) {
}
/**
* Called when an input port is opened on this filter.
* Input ports are opened by the data producer, that is, the filter that is connected to an
* input port. Override this if you need to make modifications to the port before processing
* begins. Note that this is only called if the connected filter is scheduled. You may assume
* that all ports are attached when this is called.
*
* @param port The InputPort instance that was opened.
*/
protected void onInputPortOpen(InputPort port) {
}
/**
* Called when an output port is opened on this filter.
* Output ports are opened when the filter they are attached to is opened. Override this if you
* need to make modifications to the port before processing begins. Note that this is only
* called if the filter is scheduled. You may assume that all ports are attached when this is
* called.
*
* @param port The OutputPort instance that was opened.
*/
protected void onOutputPortOpen(OutputPort port) {
}
/**
* Returns true, if the filter is currently open.
* @return true, if the filter is currently open.
*/
public final boolean isOpen() {
return mState.check(State.STATE_OPEN);
}
@Override
public String toString() {
return mName + " (" + getClass().getSimpleName() + ")";
}
/**
* Called when filter is prepared.
* Subclasses can override this to prepare the filter for processing. This method gets called
* once only just before the filter is scheduled for processing the first time.
*
* @see #onTearDown()
*/
protected void onPrepare() {
}
/**
* Called when the filter is opened.
* Subclasses can override this to perform any kind of initialization just before processing
* starts. This method may be called any number of times, but is always balanced with an
* {@link #onClose()} call.
*
* @see #onClose()
*/
protected void onOpen() {
}
/**
* Called to perform processing on Frame data.
* This is the only method subclasses must override. It is called every time the filter is
* ready for processing. Typically this is when there is input data to process and available
* output ports, but may differ depending on the port configuration.
*/
protected abstract void onProcess();
/**
* Called when the filter is closed.
* Subclasses can override this to perform any kind of post-processing steps. Processing will
* not resume until {@link #onOpen()} is called again. This method is only called if the filter
* is open.
*
* @see #onOpen()
*/
protected void onClose() {
}
/**
* Called when the filter is torn down.
* Subclasses can override this to perform clean-up tasks just before the filter is disposed of.
* It is called when the filter graph that the filter belongs to is disposed.
*
* @see #onPrepare()
*/
protected void onTearDown() {
}
/**
* Check if the input conditions are met in order to schedule this filter.
*
* This is used by {@link #canSchedule()} to determine if the input-port conditions given by
* the filter are met. Subclasses that override scheduling behavior can make use of this
* function.
*
* @return true, if the filter's input conditions are met.
*/
protected boolean inputConditionsMet() {
if (mConnectedInputPortArray.length > 0) {
int inputFrames = 0;
// [Non-iterator looping]
for (int i = 0; i < mConnectedInputPortArray.length; ++i) {
if (!mConnectedInputPortArray[i].conditionsMet()) {
return false;
} else if (mConnectedInputPortArray[i].hasFrame()) {
++inputFrames;
}
}
if (inputFrames < mMinimumAvailableInputs) {
return false;
}
}
return true;
}
/**
* Check if the output conditions are met in order to schedule this filter.
*
* This is used by {@link #canSchedule()} to determine if the output-port conditions given by
* the filter are met. Subclasses that override scheduling behavior can make use of this
* function.
*
* @return true, if the filter's output conditions are met.
*/
protected boolean outputConditionsMet() {
if (mConnectedOutputPortArray.length > 0) {
int availableOutputs = 0;
for (int i = 0; i < mConnectedOutputPortArray.length; ++i) {
if (!mConnectedOutputPortArray[i].conditionsMet()) {
return false;
} else if (mConnectedOutputPortArray[i].isAvailable()) {
++availableOutputs;
}
}
if (availableOutputs < mMinimumAvailableOutputs) {
return false;
}
}
return true;
}
/**
* Check if the Filter is in a state so that it can be scheduled.
*
* When overriding the filter's {@link #canSchedule()} method, you should never allow
* scheduling a filter that is not in a schedulable state. This will result in undefined
* behavior.
*
* @return true, if the filter is in a schedulable state.
*/
protected boolean inSchedulableState() {
return (mIsActive && !mState.check(State.STATE_CLOSED));
}
/**
* Returns true if the filter can be currently scheduled.
*
* Filters may override this method if they depend on custom factors that determine whether
* they can be scheduled or not. The scheduler calls this method to determine whether or not
* a filter can be scheduled for execution. It does not guarantee that it will be executed.
* It is strongly recommended to call super's implementation to make sure your filter can be
* scheduled based on its state, input and output ports.
*
* @return true, if the filter can be scheduled.
*/
protected boolean canSchedule() {
return inSchedulableState() && inputConditionsMet() && outputConditionsMet();
}
/**
* Returns the current FrameManager instance.
* @return the current FrameManager instance or null if there is no FrameManager set up yet.
*/
protected final FrameManager getFrameManager() {
return mFilterGraph.mRunner != null ? mFilterGraph.mRunner.getFrameManager() : null;
}
/**
* Returns whether the GraphRunner for this filter is running.
*
* Generally, this method should not be used for performing operations that need to be carried
* out before running begins. Use {@link #performPreparation(Runnable)} for this.
*
* @return true, if the GraphRunner for this filter is running.
*/
protected final boolean isRunning() {
return mFilterGraph != null && mFilterGraph.mRunner != null
&& mFilterGraph.mRunner.isRunning();
}
/**
* Performs operations before the filter is running.
*
* Use this method when your filter needs to perform operations while the graph is not
* running. The filter will not be scheduled for execution until your runnable has completed
* execution.
*
* @return true if the runnable was executed, false if the filter was already open.
*/
protected final boolean performPreparation(Runnable runnable) {
synchronized (mState) {
if (mState.current == State.STATE_OPEN) {
return false;
} else {
runnable.run();
return true;
}
}
}
/**
* Request that this filter be closed after the current processing step.
*
* Implementations may call this within their {@link #onProcess()} calls to indicate that the
* filter is done processing and wishes to be closed. After such a request the filter will be
* closed and no longer receive {@link #onProcess()} calls.
*
* @see #onClose()
* @see #onProcess()
*/
protected final void requestClose() {
mRequests |= REQUEST_FLAG_CLOSE;
}
/**
* Sets the minimum number of input frames required to process.
* A filter will not be scheduled unless at least a certain number of input frames are available
* on the input ports. This is only relevant if the filter has input ports and is not waiting on
* all ports.
* The default value is 1.
*
* @param count the minimum number of frames required to process.
* @see #getMinimumAvailableInputs()
* @see #setMinimumAvailableOutputs(int)
* @see InputPort#setWaitsForFrame(boolean)
*/
protected final void setMinimumAvailableInputs(int count) {
mMinimumAvailableInputs = count;
}
/**
* Returns the minimum number of input frames required to process this filter.
* The default value is 1.
*
* @return the minimum number of input frames required to process.
* @see #setMinimumAvailableInputs(int)
*/
protected final int getMinimumAvailableInputs() {
return mMinimumAvailableInputs;
}
/**
* Sets the minimum number of available output ports required to process.
* A filter will not be scheduled unless at least a certain number of output ports are available.
* This is only relevant if the filter has output ports and is not waiting on all ports. The
* default value is 1.
*
* @param count the minimum number of frames required to process.
* @see #getMinimumAvailableOutputs()
* @see #setMinimumAvailableInputs(int)
* @see OutputPort#setWaitsUntilAvailable(boolean)
*/
protected final void setMinimumAvailableOutputs(int count) {
mMinimumAvailableOutputs = count;
}
/**
* Returns the minimum number of available outputs required to process this filter.
* The default value is 1.
*
* @return the minimum number of available outputs required to process.
* @see #setMinimumAvailableOutputs(int)
*/
protected final int getMinimumAvailableOutputs() {
return mMinimumAvailableOutputs;
}
/**
* Puts the filter to sleep so that it is no longer scheduled.
* To resume scheduling the filter another thread must call wakeUp() on this filter.
*/
protected final void enterSleepState() {
mIsSleeping.set(true);
}
/**
* Wakes the filter and resumes scheduling.
* This is generally called from another thread to signal that this filter should resume
* processing. Does nothing if filter is not sleeping.
*/
protected final void wakeUp() {
if (mIsSleeping.getAndSet(false)) {
if (isRunning()) {
mFilterGraph.mRunner.signalWakeUp();
}
}
}
/**
* Returns whether this Filter is allowed to use OpenGL.
*
* Filters may use OpenGL if the MffContext supports OpenGL and its GraphRunner allows it.
*
* @return true, if this Filter is allowed to use OpenGL.
*/
protected final boolean isOpenGLSupported() {
return mFilterGraph.mRunner.isOpenGLSupported();
}
/**
* Connect an output port to an input port of another filter.
* Connects the output port with the specified name to the input port with the specified name
* of the specified filter. If the input or output ports do not exist already, they are
* automatically created and added to the respective filter.
*/
final void connect(String outputName, Filter targetFilter, String inputName) {
// Make sure not connected already
if (getConnectedOutputPort(outputName) != null) {
throw new RuntimeException("Attempting to connect already connected output port '"
+ outputName + "' of filter '" + this + "'!");
} else if (targetFilter.getConnectedInputPort(inputName) != null) {
throw new RuntimeException("Attempting to connect already connected input port '"
+ inputName + "' of filter '" + targetFilter + "'!");
}
// Establish connection
InputPort inputPort = targetFilter.newInputPort(inputName);
OutputPort outputPort = newOutputPort(outputName);
outputPort.setTarget(inputPort);
// Fire attachment callbacks
targetFilter.onInputPortAttached(inputPort);
onOutputPortAttached(outputPort);
// Update array of ports (which is maintained for more efficient access)
updatePortArrays();
}
final Map<String, InputPort> getConnectedInputPortMap() {
return mConnectedInputPorts;
}
final Map<String, OutputPort> getConnectedOutputPortMap() {
return mConnectedOutputPorts;
}
final void execute() {
synchronized (mState) {
autoPullInputs();
mLastScheduleTime = SystemClock.elapsedRealtime();
if (mState.current == State.STATE_UNPREPARED) {
onPrepare();
mState.current = State.STATE_PREPARED;
}
if (mState.current == State.STATE_PREPARED) {
openPorts();
onOpen();
mState.current = State.STATE_OPEN;
}
if (mState.current == State.STATE_OPEN) {
onProcess();
if (mRequests != REQUEST_FLAG_NONE) {
processRequests();
}
}
}
autoReleaseFrames();
++mScheduleCount;
}
final void performClose() {
synchronized (mState) {
if (mState.current == State.STATE_OPEN) {
onClose();
mIsSleeping.set(false);
mState.current = State.STATE_CLOSED;
mCurrentTimestamp = Frame.TIMESTAMP_NOT_SET;
}
}
}
final void softReset() {
synchronized (mState) {
performClose();
if (mState.current == State.STATE_CLOSED) {
mState.current = State.STATE_PREPARED;
}
}
}
final void performTearDown() {
synchronized (mState) {
if (mState.current == State.STATE_OPEN) {
throw new RuntimeException("Attempting to tear-down filter " + this + " which is "
+ "in an open state!");
} else if (mState.current != State.STATE_DESTROYED
&& mState.current != State.STATE_UNPREPARED) {
onTearDown();
mState.current = State.STATE_DESTROYED;
}
}
}
final void insertIntoFilterGraph(FilterGraph graph) {
mFilterGraph = graph;
updatePortArrays();
}
final int getScheduleCount() {
return mScheduleCount;
}
final void resetScheduleCount() {
mScheduleCount = 0;
}
final void openPorts() {
// Opening the output ports will open the connected input ports
for (OutputPort outputPort : mConnectedOutputPorts.values()) {
openOutputPort(outputPort);
}
}
final void addAutoReleaseFrame(Frame frame) {
mAutoReleaseFrames.add(frame);
}
final long getCurrentTimestamp() {
return mCurrentTimestamp;
}
final void onPulledFrameWithTimestamp(long timestamp) {
if (timestamp > mCurrentTimestamp || mCurrentTimestamp == Frame.TIMESTAMP_NOT_SET) {
mCurrentTimestamp = timestamp;
}
}
final void openOutputPort(OutputPort outPort) {
if (outPort.getQueue() == null) {
try {
FrameQueue.Builder builder = new FrameQueue.Builder();
InputPort inPort = outPort.getTarget();
outPort.onOpen(builder);
inPort.onOpen(builder);
Filter targetFilter = inPort.getFilter();
String queueName = mName + "[" + outPort.getName() + "] -> " + targetFilter.mName
+ "[" + inPort.getName() + "]";
FrameQueue queue = builder.build(queueName);
outPort.setQueue(queue);
inPort.setQueue(queue);
} catch (RuntimeException e) {
throw new RuntimeException("Could not open output port " + outPort + "!", e);
}
}
}
final boolean isSleeping() {
return mIsSleeping.get();
}
final long getLastScheduleTime() {
return mLastScheduleTime;
}
private final void autoPullInputs() {
// [Non-iterator looping]
for (int i = 0; i < mConnectedInputPortArray.length; ++i) {
InputPort port = mConnectedInputPortArray[i];
if (port.hasFrame() && port.isAutoPullEnabled()) {
mConnectedInputPortArray[i].pullFrame();
}
}
}
private final void autoReleaseFrames() {
// [Non-iterator looping]
for (int i = 0; i < mAutoReleaseFrames.size(); ++i) {
mAutoReleaseFrames.get(i).release();
}
mAutoReleaseFrames.clear();
}
private final InputPort newInputPort(String name) {
InputPort result = mConnectedInputPorts.get(name);
if (result == null) {
Signature.PortInfo info = getSignature().getInputPortInfo(name);
result = new InputPort(this, name, info);
mConnectedInputPorts.put(name, result);
}
return result;
}
private final OutputPort newOutputPort(String name) {
OutputPort result = mConnectedOutputPorts.get(name);
if (result == null) {
Signature.PortInfo info = getSignature().getOutputPortInfo(name);
result = new OutputPort(this, name, info);
mConnectedOutputPorts.put(name, result);
}
return result;
}
private final void processRequests() {
if ((mRequests & REQUEST_FLAG_CLOSE) != 0) {
performClose();
mRequests = REQUEST_FLAG_NONE;
}
}
private void assertIsPaused() {
GraphRunner runner = GraphRunner.current();
if (runner != null && !runner.isPaused() && !runner.isStopped()) {
throw new RuntimeException("Attempting to modify filter state while runner is "
+ "executing. Please pause or stop the runner first!");
}
}
private final void updatePortArrays() {
// Copy our port-maps to arrays for faster non-iterator access
mConnectedInputPortArray = mConnectedInputPorts.values().toArray(new InputPort[0]);
mConnectedOutputPortArray = mConnectedOutputPorts.values().toArray(new OutputPort[0]);
}
}
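// A minimal pass-through sketch (not part of the original sources) illustrating the lifecycle
// hooks documented above: only onProcess() must be overridden. It assumes the
// (MffContext, String) constructor that FilterFactory requires and that the connected-port
// getters are visible to subclasses; the port names "input" and "output" are illustrative.
final class PassThroughFilter extends Filter {
    public PassThroughFilter(MffContext context, String name) {
        super(context, name);
    }
    @Override
    protected void onProcess() {
        // Pull the next input frame and forward it unchanged to the output port.
        Frame frame = getConnectedInputPort("input").pullFrame();
        getConnectedOutputPort("output").pushFrame(frame);
    }
}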

View File

@ -0,0 +1,150 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import android.util.Log;
import dalvik.system.PathClassLoader;
import java.lang.reflect.Constructor;
import java.util.HashSet;
public class FilterFactory {
private static FilterFactory mSharedFactory;
private HashSet<String> mPackages = new HashSet<String>();
private static ClassLoader mCurrentClassLoader;
private static HashSet<String> mLibraries;
private static Object mClassLoaderGuard;
static {
mCurrentClassLoader = Thread.currentThread().getContextClassLoader();
mLibraries = new HashSet<String>();
mClassLoaderGuard = new Object();
}
private static final String TAG = "FilterFactory";
private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
public static FilterFactory sharedFactory() {
if (mSharedFactory == null) {
mSharedFactory = new FilterFactory();
}
return mSharedFactory;
}
/**
* Adds a new Java library to the list to be scanned for filters.
* libraryPath must be an absolute path of the jar file. This needs to be
* static because only one classloader per process can open a shared native
* library, which a filter may well have.
*/
public static void addFilterLibrary(String libraryPath) {
if (mLogVerbose) Log.v(TAG, "Adding filter library " + libraryPath);
synchronized(mClassLoaderGuard) {
if (mLibraries.contains(libraryPath)) {
if (mLogVerbose) Log.v(TAG, "Library already added");
return;
}
mLibraries.add(libraryPath);
// Chain another path loader to the current chain
mCurrentClassLoader = new PathClassLoader(libraryPath, mCurrentClassLoader);
}
}
public void addPackage(String packageName) {
if (mLogVerbose) Log.v(TAG, "Adding package " + packageName);
/* TODO: This should use a getPackage call in the caller's context, but no such method
exists.
Package pkg = Package.getPackage(packageName);
if (pkg == null) {
throw new IllegalArgumentException("Unknown filter package '" + packageName + "'!");
}
*/
mPackages.add(packageName);
}
public boolean isFilterAvailable(String className) {
return getFilterClass(className) != null;
}
public Filter createFilterByClassName(String className, String filterName, MffContext context) {
if (mLogVerbose) Log.v(TAG, "Looking up class " + className);
Class<? extends Filter> filterClass = getFilterClass(className);
if (filterClass == null) {
throw new IllegalArgumentException("Unknown filter class '" + className + "'!");
}
return createFilterByClass(filterClass, filterName, context);
}
public Filter createFilterByClass(Class<? extends Filter> filterClass,
String filterName, MffContext context) {
// Look for the correct constructor
Constructor<? extends Filter> filterConstructor = null;
try {
filterConstructor = filterClass.getConstructor(MffContext.class, String.class);
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException("The filter class '" + filterClass
+ "' does not have a constructor of the form <init>(MffContext, String)!");
}
// Construct the filter
Filter filter = null;
try {
filter = filterConstructor.newInstance(context, filterName);
} catch (Throwable t) {
throw new RuntimeException("Error creating filter " + filterName + "!", t);
}
if (filter == null) {
throw new IllegalArgumentException("Could not construct the filter '"
+ filterName + "'!");
}
return filter;
}
private Class<? extends Filter> getFilterClass(String name) {
Class<?> filterClass = null;
// Look for the class in the imported packages
for (String packageName : mPackages) {
try {
if (mLogVerbose) Log.v(TAG, "Trying "+ packageName + "." + name);
synchronized(mClassLoaderGuard) {
filterClass = mCurrentClassLoader.loadClass(packageName + "." + name);
}
} catch (ClassNotFoundException e) {
continue;
}
// Exit loop if class was found.
if (filterClass != null) {
break;
}
}
Class<? extends Filter> result = null;
try {
if (filterClass != null) {
result = filterClass.asSubclass(Filter.class);
}
} catch (ClassCastException e) {
// Leave result == null
}
return result;
}
}
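// A hedged usage sketch (not part of the original file): resolving a filter by its simple
// class name. The package and class names below ("androidx.media.filterpacks.image",
// "BrightnessFilter") are illustrative assumptions; any class created this way must expose
// an <init>(MffContext, String) constructor, as documented above.
final class FilterFactoryExample {
    static Filter createBrightnessFilter(MffContext context) {
        FilterFactory factory = FilterFactory.sharedFactory();
        factory.addPackage("androidx.media.filterpacks.image");
        // Throws if the class cannot be found or lacks the required constructor.
        return factory.createFilterByClassName("BrightnessFilter", "brightness", context);
    }
}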

View File

@ -0,0 +1,567 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import android.util.Log;
import android.view.View;
import androidx.media.filterpacks.base.BranchFilter;
import androidx.media.filterpacks.base.FrameSlotSource;
import androidx.media.filterpacks.base.FrameSlotTarget;
import androidx.media.filterpacks.base.GraphInputSource;
import androidx.media.filterpacks.base.GraphOutputTarget;
import androidx.media.filterpacks.base.ValueTarget;
import androidx.media.filterpacks.base.ValueTarget.ValueListener;
import androidx.media.filterpacks.base.VariableSource;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map.Entry;
import java.util.Set;
/**
* A graph of Filter nodes.
*
* A FilterGraph instance contains a set of Filter instances connected by their output and input
* ports. Every filter belongs to exactly one graph and cannot be moved to another graph.
*
* FilterGraphs may contain sub-graphs that are dependent on the parent graph. These are typically
* used when inserting sub-graphs into MetaFilters. When a parent graph is torn down so are its
* sub-graphs. The same applies to flushing frames of a graph.
*/
public class FilterGraph {
private final static boolean DEBUG = false;
/** The context that this graph lives in */
private MffContext mContext;
/** Map from name of filter to the filter instance */
private HashMap<String, Filter> mFilterMap = new HashMap<String, Filter>();
/** Allows quick access to array of all filters. */
private Filter[] mAllFilters = null;
/** The GraphRunner currently attached to this graph */
GraphRunner mRunner;
/** The set of sub-graphs of this graph */
HashSet<FilterGraph> mSubGraphs = new HashSet<FilterGraph>();
/** The parent graph of this graph, or null if this graph is a root graph. */
private FilterGraph mParentGraph;
public static class Builder {
/** The context that this builder lives in */
private MffContext mContext;
/** Map from name of filter to the filter instance */
private HashMap<String, Filter> mFilterMap = new HashMap<String, Filter>();
/**
* Creates a new builder for specifying a graph structure.
* @param context The context the graph will live in.
*/
public Builder(MffContext context) {
mContext = context;
}
/**
* Add a filter to the graph.
*
* Adds the specified filter to the set of filters of this graph. The filter must not be in
* the graph already, and the filter's name must be unique within the graph.
*
* @param filter the filter to add to the graph.
* @throws IllegalArgumentException if the filter is in the graph already, or its name is
* already taken.
*/
public void addFilter(Filter filter) {
if (mFilterMap.values().contains(filter)) {
throw new IllegalArgumentException("Attempting to add filter " + filter + " that "
+ "is in the graph already!");
} else if (mFilterMap.containsKey(filter.getName())) {
throw new IllegalArgumentException("Graph contains filter with name '"
+ filter.getName() + "' already!");
} else {
mFilterMap.put(filter.getName(), filter);
}
}
/**
* Adds a variable to the graph.
*
* TODO: More documentation.
*
* @param name the name of the variable.
* @param value the value of the variable or null if no value is to be set yet.
* @return the VariableSource filter that holds the value of this variable.
*/
public VariableSource addVariable(String name, Object value) {
if (getFilter(name) != null) {
throw new IllegalArgumentException("Filter named '" + name + "' exists already!");
}
VariableSource valueSource = new VariableSource(mContext, name);
addFilter(valueSource);
if (value != null) {
valueSource.setValue(value);
}
return valueSource;
}
public FrameSlotSource addFrameSlotSource(String name, String slotName) {
FrameSlotSource filter = new FrameSlotSource(mContext, name, slotName);
addFilter(filter);
return filter;
}
public FrameSlotTarget addFrameSlotTarget(String name, String slotName) {
FrameSlotTarget filter = new FrameSlotTarget(mContext, name, slotName);
addFilter(filter);
return filter;
}
/**
* Connect two filters by their ports.
* The filters specified must have been previously added to the graph builder.
*
* @param sourceFilterName The name of the source filter.
* @param sourcePort The name of the source port.
* @param targetFilterName The name of the target filter.
* @param targetPort The name of the target port.
*/
public void connect(String sourceFilterName, String sourcePort,
String targetFilterName, String targetPort) {
Filter sourceFilter = getFilter(sourceFilterName);
Filter targetFilter = getFilter(targetFilterName);
if (sourceFilter == null) {
throw new IllegalArgumentException("Unknown filter '" + sourceFilterName + "'!");
} else if (targetFilter == null) {
throw new IllegalArgumentException("Unknown filter '" + targetFilterName + "'!");
}
connect(sourceFilter, sourcePort, targetFilter, targetPort);
}
/**
* Connect two filters by their ports.
* The filters specified must have been previously added to the graph builder.
*
* @param sourceFilter The source filter.
* @param sourcePort The name of the source port.
* @param targetFilter The target filter.
* @param targetPort The name of the target port.
*/
public void connect(Filter sourceFilter, String sourcePort,
Filter targetFilter, String targetPort) {
sourceFilter.connect(sourcePort, targetFilter, targetPort);
}
/**
* Returns the filter with the specified name.
*
* @return the filter with the specified name, or null if no such filter exists.
*/
public Filter getFilter(String name) {
return mFilterMap.get(name);
}
/**
* Builds the graph and checks signatures.
*
* @return The new graph instance.
*/
public FilterGraph build() {
checkSignatures();
return buildWithParent(null);
}
/**
* Builds the sub-graph and checks signatures.
*
* @param parentGraph the parent graph of the built sub-graph.
* @return The new graph instance.
*/
public FilterGraph buildSubGraph(FilterGraph parentGraph) {
if (parentGraph == null) {
throw new NullPointerException("Parent graph must be non-null!");
}
checkSignatures();
return buildWithParent(parentGraph);
}
VariableSource assignValueToFilterInput(Object value, String filterName, String inputName) {
// Get filter to connect to
Filter filter = getFilter(filterName);
if (filter == null) {
throw new IllegalArgumentException("Unknown filter '" + filterName + "'!");
}
// Construct a name for our value source and make sure it does not exist already
String valueSourceName = filterName + "." + inputName;
if (getFilter(valueSourceName) != null) {
throw new IllegalArgumentException("VariableSource for '" + filterName + "' and "
+ "input '" + inputName + "' exists already!");
}
// Create new VariableSource and connect it to the target filter and port
VariableSource valueSource = new VariableSource(mContext, valueSourceName);
addFilter(valueSource);
try {
((Filter)valueSource).connect("value", filter, inputName);
} catch (RuntimeException e) {
throw new RuntimeException("Could not connect VariableSource to input '" + inputName
+ "' of filter '" + filterName + "'!", e);
}
// Assign the value to the VariableSource
if (value != null) {
valueSource.setValue(value);
}
return valueSource;
}
VariableSource assignVariableToFilterInput(String varName,
String filterName,
String inputName) {
// Get filter to connect to
Filter filter = getFilter(filterName);
if (filter == null) {
throw new IllegalArgumentException("Unknown filter '" + filterName + "'!");
}
// Get variable
Filter variable = getFilter(varName);
if (variable == null || !(variable instanceof VariableSource)) {
throw new IllegalArgumentException("Unknown variable '" + varName + "'!");
}
// Connect variable (and possibly branch) variable to filter
try {
connectAndBranch(variable, "value", filter, inputName);
} catch (RuntimeException e) {
throw new RuntimeException("Could not connect VariableSource to input '" + inputName
+ "' of filter '" + filterName + "'!", e);
}
return (VariableSource)variable;
}
/**
* Builds the graph without checking signatures.
* If parent is non-null, build a sub-graph of the specified parent.
*
* @return The new graph instance.
*/
private FilterGraph buildWithParent(FilterGraph parent) {
FilterGraph graph = new FilterGraph(mContext, parent);
graph.mFilterMap = mFilterMap;
graph.mAllFilters = mFilterMap.values().toArray(new Filter[0]);
for (Entry<String, Filter> filterEntry : mFilterMap.entrySet()) {
filterEntry.getValue().insertIntoFilterGraph(graph);
}
return graph;
}
private void checkSignatures() {
checkSignaturesForFilters(mFilterMap.values());
}
// TODO: Currently this always branches even if the connection is a 1:1 connection. Later
// we may optimize to pass through directly in the 1:1 case (may require disconnecting
// ports).
private void connectAndBranch(Filter sourceFilter,
String sourcePort,
Filter targetFilter,
String targetPort) {
String branchName = "__" + sourceFilter.getName() + "_" + sourcePort + "Branch";
Filter branch = getFilter(branchName);
if (branch == null) {
branch = new BranchFilter(mContext, branchName, false);
addFilter(branch);
sourceFilter.connect(sourcePort, branch, "input");
}
String portName = "to" + targetFilter.getName() + "_" + targetPort;
branch.connect(portName, targetFilter, targetPort);
}
}
/**
* Attach the graph and its subgraphs to a custom GraphRunner.
*
* Call this if you want the graph to be executed by a specific GraphRunner. You must call
* this before any other runner is set. Note that calls to {@code getRunner()} and
* {@code run()} auto-create a GraphRunner.
*
* @param runner The GraphRunner instance that should execute this graph.
* @see #getRunner()
* @see #run()
*/
public void attachToRunner(GraphRunner runner) {
if (mRunner == null) {
for (FilterGraph subGraph : mSubGraphs) {
subGraph.attachToRunner(runner);
}
runner.attachGraph(this);
mRunner = runner;
} else if (mRunner != runner) {
throw new RuntimeException("Cannot attach FilterGraph to GraphRunner that is already "
+ "attached to another GraphRunner!");
}
}
/**
* Forcibly tear down a filter graph.
*
* Call this to release any resources associated with the filter graph, its filters and any of
* its sub-graphs. This method must not be called if the graph (or any sub-graph) is running.
*
* You may no longer access this graph instance or any of its subgraphs after calling this
* method.
*
* Tearing down of sub-graphs is not supported. You must tear down the root graph, which will
* tear down all of its sub-graphs.
*
* @throws IllegalStateException if the graph is still running.
* @throws RuntimeException if you attempt to tear down a sub-graph.
*/
public void tearDown() {
assertNotRunning();
if (mParentGraph != null) {
throw new RuntimeException("Attempting to tear down sub-graph!");
}
if (mRunner != null) {
mRunner.tearDownGraph(this);
}
for (FilterGraph subGraph : mSubGraphs) {
subGraph.mParentGraph = null;
subGraph.tearDown();
}
mSubGraphs.clear();
}
/**
* Returns the context of the graph.
*
* @return the MffContext instance that this graph is bound to.
*/
public MffContext getContext() {
return mContext;
}
/**
* Returns the filter with the specified name.
*
* @return the filter with the specified name, or null if no such filter exists.
*/
public Filter getFilter(String name) {
return mFilterMap.get(name);
}
/**
* Returns the VariableSource for the specified variable.
*
* TODO: More documentation.
* TODO: More specialized error handling.
*
* @param name The name of the VariableSource.
* @return The VariableSource filter instance with the specified name.
*/
public VariableSource getVariable(String name) {
Filter result = mFilterMap.get(name);
if (result != null && result instanceof VariableSource) {
return (VariableSource)result;
} else {
throw new IllegalArgumentException("Unknown variable '" + name + "' specified!");
}
}
/**
* Returns the GraphOutputTarget with the specified name.
*
* @param name The name of the target.
* @return The GraphOutputTarget instance with the specified name.
*/
public GraphOutputTarget getGraphOutput(String name) {
Filter result = mFilterMap.get(name);
if (result != null && result instanceof GraphOutputTarget) {
return (GraphOutputTarget)result;
} else {
throw new IllegalArgumentException("Unknown target '" + name + "' specified!");
}
}
/**
* Returns the GraphInputSource with the specified name.
*
* @param name The name of the source.
* @return The GraphInputSource instance with the specified name.
*/
public GraphInputSource getGraphInput(String name) {
Filter result = mFilterMap.get(name);
if (result != null && result instanceof GraphInputSource) {
return (GraphInputSource)result;
} else {
throw new IllegalArgumentException("Unknown source '" + name + "' specified!");
}
}
/**
* Binds a filter to a view.
*
* ViewFilter instances support visualizing their data to a view. See the specific filter
* documentation for details. Views may be bound only if the graph is not running.
*
* @param filterName the name of the filter to bind.
* @param view the view to bind to.
* @throws IllegalStateException if the filter is in an illegal state.
* @throws IllegalArgumentException if no such view-filter exists.
*/
public void bindFilterToView(String filterName, View view) {
Filter filter = mFilterMap.get(filterName);
if (filter != null && filter instanceof ViewFilter) {
((ViewFilter)filter).bindToView(view);
} else {
throw new IllegalArgumentException("Unknown view filter '" + filterName + "'!");
}
}
/**
* TODO: Documentation.
*/
public void bindValueTarget(String filterName, ValueListener listener, boolean onCallerThread) {
Filter filter = mFilterMap.get(filterName);
if (filter != null && filter instanceof ValueTarget) {
((ValueTarget)filter).setListener(listener, onCallerThread);
} else {
throw new IllegalArgumentException("Unknown ValueTarget filter '" + filterName + "'!");
}
}
// Running Graphs //////////////////////////////////////////////////////////////////////////////
/**
* Convenience method to run the graph.
*
* Creates a new runner for this graph in the specified mode and executes it. Returns the
* runner to allow control of execution.
*
* @throws IllegalStateException if the graph is already running.
* @return the GraphRunner instance that was used for execution.
*/
public GraphRunner run() {
GraphRunner runner = getRunner();
runner.setIsVerbose(false);
runner.start(this);
return runner;
}
/**
* Returns the GraphRunner for this graph.
*
* Every FilterGraph instance has a GraphRunner instance associated with it for executing the
* graph.
*
* @return the GraphRunner instance for this graph.
*/
public GraphRunner getRunner() {
if (mRunner == null) {
GraphRunner runner = new GraphRunner(mContext);
attachToRunner(runner);
}
return mRunner;
}
/**
* Returns whether the graph is currently running.
*
* @return true if the graph is currently running.
*/
public boolean isRunning() {
return mRunner != null && mRunner.isRunning();
}
/**
* Check each filter's signatures if all requirements are fulfilled.
*
* This will throw a RuntimeException if any unfulfilled requirements are found.
* Note that FilterGraph.Builder performs the same check in its build() methods, i.e. /before/
* the FilterGraph is built.
*/
public void checkSignatures() {
checkSignaturesForFilters(mFilterMap.values());
}
// MFF Internal Methods ////////////////////////////////////////////////////////////////////////
Filter[] getAllFilters() {
return mAllFilters;
}
static void checkSignaturesForFilters(Collection<Filter> filters) {
for (Filter filter : filters) {
if (DEBUG) {
Log.d("FilterGraph", "Checking filter " + filter.getName() + "...");
}
Signature signature = filter.getSignature();
signature.checkInputPortsConform(filter);
signature.checkOutputPortsConform(filter);
}
}
/**
* Wipes the filter references in this graph, so that they may be collected.
*
* This must be called only after a tearDown as this will make the FilterGraph invalid.
*/
void wipe() {
mAllFilters = null;
mFilterMap = null;
}
void flushFrames() {
for (Filter filter : mFilterMap.values()) {
for (InputPort inputPort : filter.getConnectedInputPorts()) {
inputPort.clear();
}
for (OutputPort outputPort : filter.getConnectedOutputPorts()) {
outputPort.clear();
}
}
}
Set<FilterGraph> getSubGraphs() {
return mSubGraphs;
}
// Internal Methods ////////////////////////////////////////////////////////////////////////////
private FilterGraph(MffContext context, FilterGraph parentGraph) {
mContext = context;
mContext.addGraph(this);
if (parentGraph != null) {
mParentGraph = parentGraph;
mParentGraph.mSubGraphs.add(this);
}
}
private void assertNotRunning() {
if (isRunning()) {
throw new IllegalStateException("Attempting to modify running graph!");
}
}
}
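// A minimal construction sketch (not part of the original file). The filter instances, their
// names, and the port names "output"/"input" are illustrative assumptions; the Builder,
// connect() and run() calls follow the API defined above.
final class FilterGraphExample {
    static GraphRunner buildAndRun(MffContext context, Filter source, Filter sink) {
        FilterGraph.Builder builder = new FilterGraph.Builder(context);
        builder.addFilter(source);
        builder.addFilter(sink);
        builder.connect(source.getName(), "output", sink.getName(), "input");
        FilterGraph graph = builder.build(); // also checks the port signatures
        return graph.run();                  // lazily creates a GraphRunner and starts it
    }
}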

View File

@ -0,0 +1,203 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import java.util.Arrays;
/**
* Frames are the data containers that are transported between Filters.
*
* Frames may be used only within a Filter during filter graph execution. Accessing Frames outside
* of graph execution may cause unexpected results.
*
* There are two ways to obtain new Frame instances. You can call
* {@link OutputPort#fetchAvailableFrame(int[])} on an OutputPort to obtain a Frame to pass to an
* output. You can also call {@link #create(FrameType, int[])} to obtain
* a detached Frame instance that you may hold onto in your filter. If you need to hold on to a
* Frame that is owned by an input or output queue, you must call
* {@link #retain()} on it.
*
* When you are done using a detached Frame, you must release it yourself.
*
* To access frame data, call any of the {@code lock}-methods. This will give you access to the
* frame data in the desired format. You must pass in a {@code mode} indicating whether you wish
* to read or write to the data. Writing to a read-locked Frame may produce unexpected results and
* interfere with other filters. When you are done reading or writing to the data, you must call
* {@link #unlock()}. Note, that a Frame must be unlocked before you push it into an output queue.
*
* Generally, any access format to a Frame's data will be granted. However, it is strongly
* recommended to specify the access format that you intend to use in your filter's signature or
* in the access flags passed to {@code newFrame()}. This will allow the Frame to allocate
* the most efficient backings for the intended type of access.
*
* A frame can be pushed to an OutputPort by calling the {@link OutputPort#pushFrame(Frame)}
* method. Frames that have been pushed become read-only, and can no longer be modified.
*
* On the other end, a Filter can pull in an input Frame by calling {@link InputPort#pullFrame()}
* on the desired InputPort. Such frames are always read-only.
*/
public class Frame {
/** Special timestamp value indicating that no time-stamp was set. */
public static final long TIMESTAMP_NOT_SET = -1;
/** Frame data access mode: Read */
public static final int MODE_READ = 1;
/** Frame data access mode: Write */
public static final int MODE_WRITE = 2;
BackingStore mBackingStore;
boolean mReadOnly = false;
// Public API //////////////////////////////////////////////////////////////////////////////////
/**
* Returns the frame's type.
* @return A FrameType instance describing the frame data-type.
*/
public final FrameType getType() {
return mBackingStore.getFrameType();
}
public final int getElementCount() {
return mBackingStore.getElementCount();
}
/**
* Set the frame's timestamp in nanoseconds.
*
* @param timestamp the timestamp of this frame in nanoseconds.
*/
public final void setTimestamp(long timestamp) {
mBackingStore.setTimestamp(timestamp);
}
/**
* @return the frame's timestamp in nanoseconds.
*/
public final long getTimestamp() {
return mBackingStore.getTimestamp();
}
/**
* @return the frame's timestamp in milliseconds.
*/
public final long getTimestampMillis() {
return mBackingStore.getTimestamp() / 1000000L;
}
public final boolean isReadOnly() {
return mReadOnly;
}
public final FrameValue asFrameValue() {
return FrameValue.create(mBackingStore);
}
public final FrameValues asFrameValues() {
return FrameValues.create(mBackingStore);
}
public final FrameBuffer1D asFrameBuffer1D() {
return FrameBuffer1D.create(mBackingStore);
}
public final FrameBuffer2D asFrameBuffer2D() {
return FrameBuffer2D.create(mBackingStore);
}
public final FrameImage2D asFrameImage2D() {
return FrameImage2D.create(mBackingStore);
}
@Override
public String toString() {
return "Frame[" + getType().toString() + "]: " + mBackingStore;
}
@Override
public boolean equals(Object object) {
return object instanceof Frame && ((Frame)object).mBackingStore == mBackingStore;
}
public static Frame create(FrameType type, int[] dimensions) {
FrameManager manager = FrameManager.current();
if (manager == null) {
throw new IllegalStateException("Attempting to create new Frame outside of "
+ "FrameManager context!");
}
return new Frame(type, dimensions, manager);
}
public final Frame release() {
mBackingStore = mBackingStore.release();
return mBackingStore != null ? this : null;
}
public final Frame retain() {
mBackingStore = mBackingStore.retain();
return this;
}
public void unlock() {
if (!mBackingStore.unlock()) {
throw new RuntimeException("Attempting to unlock frame that is not locked!");
}
}
public int[] getDimensions() {
int[] dim = mBackingStore.getDimensions();
return dim != null ? Arrays.copyOf(dim, dim.length) : null;
}
Frame(FrameType type, int[] dimensions, FrameManager manager) {
mBackingStore = new BackingStore(type, dimensions, manager);
}
Frame(BackingStore backingStore) {
mBackingStore = backingStore;
}
final void assertAccessible(int mode) {
// Make sure frame is in write-mode
if (mReadOnly && mode == MODE_WRITE) {
throw new RuntimeException("Attempting to write to read-only frame " + this + "!");
}
}
final void setReadOnly(boolean readOnly) {
mReadOnly = readOnly;
}
void resize(int[] newDims) {
int[] oldDims = mBackingStore.getDimensions();
int oldCount = oldDims == null ? 0 : oldDims.length;
int newCount = newDims == null ? 0 : newDims.length;
if (oldCount != newCount) {
throw new IllegalArgumentException("Cannot resize " + oldCount + "-dimensional "
+ "Frame to " + newCount + "-dimensional Frame!");
} else if (newDims != null && !Arrays.equals(oldDims, newDims)) {
mBackingStore.resize(newDims);
}
}
Frame makeCpuCopy(FrameManager frameManager) {
Frame frame = new Frame(getType(), getDimensions(), frameManager);
frame.mBackingStore.importStore(mBackingStore);
return frame;
}
}
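// A hedged sketch (not part of the original file): allocating a detached Frame that matches
// another frame's type and size. Frame.create() only works while a FrameManager is current,
// i.e. during graph execution, and the caller owns the detached frame and must release it.
final class FrameExample {
    static Frame newFrameLike(Frame original) {
        Frame detached = Frame.create(original.getType(), original.getDimensions());
        detached.setTimestamp(original.getTimestamp());
        return detached; // the caller must call release() when done with it
    }
}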

View File

@ -0,0 +1,103 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import android.annotation.TargetApi;
import android.renderscript.Allocation;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public class FrameBuffer1D extends Frame {
private int mLength = 0;
/**
* Access frame's data using a {@link ByteBuffer}.
* This is a convenience method and is equivalent to calling {@code lockData} with an
* {@code accessFormat} of {@code ACCESS_BYTES}.
* When writing to the {@link ByteBuffer}, the byte order should always be set to
* {@link ByteOrder#nativeOrder()}.
*
* @return The byte buffer instance holding the Frame's data.
*/
public ByteBuffer lockBytes(int mode) {
assertAccessible(mode);
return (ByteBuffer)mBackingStore.lockData(mode, BackingStore.ACCESS_BYTES);
}
/**
* Access frame's data using a RenderScript {@link Allocation}.
* This is a convenience method and is equivalent to calling {@code lockData} with an
* {@code accessFormat} of {@code ACCESS_ALLOCATION}.
*
* @return The Allocation instance holding the Frame's data.
*/
@TargetApi(11)
public Allocation lockAllocation(int mode) {
assertAccessible(mode);
return (Allocation) mBackingStore.lockData(mode, BackingStore.ACCESS_ALLOCATION);
}
public int getLength() {
return mLength;
}
@Override
public int[] getDimensions() {
return super.getDimensions();
}
/**
* TODO: Documentation. Note that frame contents are invalidated.
*/
@Override
public void resize(int[] newDimensions) {
super.resize(newDimensions);
}
static FrameBuffer1D create(BackingStore backingStore) {
assertCanCreate(backingStore);
return new FrameBuffer1D(backingStore);
}
FrameBuffer1D(BackingStore backingStore) {
super(backingStore);
updateLength(backingStore.getDimensions());
}
static void assertCanCreate(BackingStore backingStore) {
FrameType type = backingStore.getFrameType();
if (type.getElementSize() == 0) {
throw new RuntimeException("Cannot access Frame of type " + type + " as a FrameBuffer "
+ "instance!");
}
int[] dims = backingStore.getDimensions();
if (dims == null || dims.length == 0) {
throw new RuntimeException("Cannot access Frame with no dimensions as a FrameBuffer "
+ "instance!");
}
}
void updateLength(int[] dimensions) {
mLength = 1;
for (int dim : dimensions) {
mLength *= dim;
}
}
}
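// A hedged sketch (not part of the original file): writing raw bytes into a 1-D buffer frame.
// The frame is assumed to have been fetched from an output port during filter execution, and
// the byte order is set to the native order as recommended above.
final class FrameBuffer1DExample {
    static void fillWithZeros(FrameBuffer1D frame) {
        ByteBuffer data = frame.lockBytes(Frame.MODE_WRITE);
        data.order(ByteOrder.nativeOrder());
        while (data.hasRemaining()) {
            data.put((byte) 0);
        }
        // The frame must be unlocked before it is pushed to an output port.
        frame.unlock();
    }
}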

View File

@ -0,0 +1,48 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
public class FrameBuffer2D extends FrameBuffer1D {
public int getWidth() {
return mBackingStore.getDimensions()[0];
}
public int getHeight() {
return mBackingStore.getDimensions()[1];
}
static FrameBuffer2D create(BackingStore backingStore) {
assertCanCreate(backingStore);
return new FrameBuffer2D(backingStore);
}
FrameBuffer2D(BackingStore backingStore) {
super(backingStore);
}
static void assertCanCreate(BackingStore backingStore) {
FrameBuffer1D.assertCanCreate(backingStore);
int[] dimensions = backingStore.getDimensions();
int dimCount = dimensions != null ? dimensions.length : 0;
if (dimCount != 2) {
throw new RuntimeException("Cannot access " + dimCount + "-dimensional Frame as a "
+ "FrameBuffer2D instance!");
}
}
}

View File

@ -0,0 +1,184 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import androidx.media.filterfw.BackingStore.Backing;
public class FrameImage2D extends FrameBuffer2D {
/**
* Access frame's data using a TextureSource.
* This is a convenience method and is equivalent to calling {@code lockData} with an
* {@code accessFormat} of {@code ACCESS_TEXTURE}.
*
* @return The TextureSource instance holding the Frame's data.
*/
public TextureSource lockTextureSource() {
return (TextureSource)mBackingStore.lockData(MODE_READ, BackingStore.ACCESS_TEXTURE);
}
/**
* Access frame's data using a RenderTarget.
* This is a convenience method and is equivalent to calling {@code lockData} with an
* {@code accessFormat} of {@code ACCESS_RENDERTARGET}.
*
* @return The RenderTarget instance holding the Frame's data.
*/
public RenderTarget lockRenderTarget() {
return (RenderTarget)mBackingStore.lockData(MODE_WRITE, BackingStore.ACCESS_RENDERTARGET);
}
/**
* Assigns the pixel data of the specified bitmap.
*
* The RGBA pixel data will be extracted from the bitmap and assigned to the frame data. Note,
* that the colors are premultiplied with the alpha channel. If you wish to have
* non-premultiplied colors, you must pass the Frame through an
* {@code UnpremultiplyAlphaFilter}.
*
* @param bitmap The bitmap pixels to assign.
*/
public void setBitmap(Bitmap bitmap) {
bitmap = convertToFrameType(bitmap, mBackingStore.getFrameType());
validateBitmapSize(bitmap, mBackingStore.getDimensions());
Backing backing = mBackingStore.lockBacking(MODE_WRITE, BackingStore.ACCESS_BITMAP);
backing.setData(bitmap);
mBackingStore.unlock();
}
/**
* Returns the RGBA image contents as a Bitmap instance.
*
* @return a Bitmap instance holding the RGBA Frame image content.
*/
public Bitmap toBitmap() {
Bitmap result = (Bitmap)mBackingStore.lockData(MODE_READ, BackingStore.ACCESS_BITMAP);
mBackingStore.unlock();
return result;
}
/**
* Copies the image data from one frame to another.
*
* The source and target rectangles must be given in normalized coordinates, where 0,0 is the
* top-left of the image and 1,1 is the bottom-right.
*
* If the target rectangle is smaller than the target frame, the pixel values outside of the
* target rectangle are undefined.
*
* This method must be called within a Filter during execution. It supports both GL-enabled
* and GL-disabled run contexts.
*
* @param target The target frame to copy to.
* @param sourceRect The source rectangle in normalized coordinates.
* @param targetRect The target rectangle in normalized coordinates.
*/
public void copyToFrame(FrameImage2D target, RectF sourceRect, RectF targetRect) {
if (GraphRunner.current().isOpenGLSupported()) {
gpuImageCopy(this, target, sourceRect, targetRect);
} else {
cpuImageCopy(this, target, sourceRect, targetRect);
}
}
static FrameImage2D create(BackingStore backingStore) {
assertCanCreate(backingStore);
return new FrameImage2D(backingStore);
}
FrameImage2D(BackingStore backingStore) {
super(backingStore);
}
static void assertCanCreate(BackingStore backingStore) {
FrameBuffer2D.assertCanCreate(backingStore);
}
private static Bitmap convertToFrameType(Bitmap bitmap, FrameType type) {
Bitmap.Config config = bitmap.getConfig();
Bitmap result = bitmap;
switch(type.getElementId()) {
case FrameType.ELEMENT_RGBA8888:
if (config != Bitmap.Config.ARGB_8888) {
result = bitmap.copy(Bitmap.Config.ARGB_8888, false);
if (result == null) {
throw new RuntimeException("Could not convert bitmap to frame-type " +
"RGBA8888!");
}
}
break;
default:
throw new IllegalArgumentException("Unsupported frame type '" + type + "' for " +
"bitmap assignment!");
}
return result;
}
private void validateBitmapSize(Bitmap bitmap, int[] dimensions) {
if (bitmap.getWidth() != dimensions[0] || bitmap.getHeight() != dimensions[1]) {
throw new IllegalArgumentException("Cannot assign bitmap of size " + bitmap.getWidth()
+ "x" + bitmap.getHeight() + " to frame of size " + dimensions[0] + "x"
+ dimensions[1] + "!");
}
}
private static void gpuImageCopy(
FrameImage2D srcImage, FrameImage2D dstImage, RectF srcRect, RectF dstRect) {
ImageShader idShader = RenderTarget.currentTarget().getIdentityShader();
// We briefly modify the shader
// TODO: Implement a safer way to save and restore a shared shader.
idShader.setSourceRect(srcRect);
idShader.setTargetRect(dstRect);
idShader.process(srcImage, dstImage);
// And reset it as others may use it as well
idShader.setSourceRect(0f, 0f, 1f, 1f);
idShader.setTargetRect(0f, 0f, 1f, 1f);
}
private static void cpuImageCopy(
FrameImage2D srcImage, FrameImage2D dstImage, RectF srcRect, RectF dstRect) {
// Convert rectangles to integer rectangles in image dimensions
Rect srcIRect = new Rect((int) (srcRect.left * srcImage.getWidth()),
(int) (srcRect.top * srcImage.getHeight()),
(int) (srcRect.right * srcImage.getWidth()),
(int) (srcRect.bottom * srcImage.getHeight()));
Rect dstIRect = new Rect((int) (dstRect.left * dstImage.getWidth()),
(int) (dstRect.top * dstImage.getHeight()),
(int) (dstRect.right * dstImage.getWidth()),
(int) (dstRect.bottom * dstImage.getHeight()));
// Create target canvas
Bitmap.Config config = Bitmap.Config.ARGB_8888;
Bitmap dstBitmap = Bitmap.createBitmap(dstImage.getWidth(), dstImage.getHeight(), config);
Canvas canvas = new Canvas(dstBitmap);
// Draw source bitmap into target canvas
Paint paint = new Paint();
paint.setFilterBitmap(true);
Bitmap srcBitmap = srcImage.toBitmap();
canvas.drawBitmap(srcBitmap, srcIRect, dstIRect, paint);
// Assign bitmap to output frame
dstImage.setBitmap(dstBitmap);
}
}
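// A hedged sketch (not part of the original file): cropping the centre of one image frame
// into another with copyToFrame(). Both frames are assumed to be RGBA8888 FrameImage2D
// instances obtained during filter execution, since copyToFrame() must run inside a filter.
final class FrameImage2DExample {
    static void copyCentre(FrameImage2D source, FrameImage2D target) {
        // Source rectangle in normalized coordinates: the middle 50% of the image.
        RectF srcRect = new RectF(0.25f, 0.25f, 0.75f, 0.75f);
        // Fill the entire target frame.
        RectF dstRect = new RectF(0f, 0f, 1f, 1f);
        source.copyToFrame(target, srcRect, dstRect);
    }
}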

View File

@ -0,0 +1,473 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import androidx.media.filterfw.BackingStore.Backing;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Set;
/**
* The FrameManager tracks, caches, allocates and deallocates frame data.
* All Frame instances are managed by a FrameManager, and belong to exactly one of these. Frames
* cannot be shared across FrameManager instances, however multiple MffContexts may use the same
* FrameManager.
*
* Additionally, frame managers allow storing Frames in named slots. This allows decoupling
* filter-graphs by instructing one node to store a frame under a specific slot name, and another
* to fetch the frame from the same slot.
*/
public class FrameManager {
/** The default max cache size is set to 12 MB */
public final static int DEFAULT_MAX_CACHE_SIZE = 12 * 1024 * 1024;
/** Frame caching policy: No caching */
public final static int FRAME_CACHE_NONE = 0;
/** Frame caching policy: Drop least recently used frame buffers */
public final static int FRAME_CACHE_LRU = 1;
/** Frame caching policy: Drop least frequently used frame buffers */
public final static int FRAME_CACHE_LFU = 2;
/** Slot Flag: No flags set */
public final static int SLOT_FLAGS_NONE = 0x00;
/** Slot Flag: Sticky flag set: Frame will remain in slot after fetch. */
public final static int SLOT_FLAG_STICKY = 0x01;
private GraphRunner mRunner;
private Set<Backing> mBackings = new HashSet<Backing>();
private BackingCache mCache;
private Map<String, FrameSlot> mFrameSlots = new HashMap<String, FrameSlot>();
static class FrameSlot {
private FrameType mType;
private int mFlags;
private Frame mFrame = null;
public FrameSlot(FrameType type, int flags) {
mType = type;
mFlags = flags;
}
public FrameType getType() {
return mType;
}
public boolean hasFrame() {
return mFrame != null;
}
public void releaseFrame() {
if (mFrame != null) {
mFrame.release();
mFrame = null;
}
}
// TODO: Type check
public void assignFrame(Frame frame) {
Frame oldFrame = mFrame;
mFrame = frame.retain();
if (oldFrame != null) {
oldFrame.release();
}
}
public Frame getFrame() {
Frame result = mFrame.retain();
if ((mFlags & SLOT_FLAG_STICKY) == 0) {
releaseFrame();
}
return result;
}
public void markWritable() {
if (mFrame != null) {
mFrame.setReadOnly(false);
}
}
}
private static abstract class BackingCache {
protected int mCacheMaxSize = DEFAULT_MAX_CACHE_SIZE;
public abstract Backing fetchBacking(int mode, int access, int[] dimensions, int elemSize);
public abstract boolean cacheBacking(Backing backing);
public abstract void clear();
public abstract int getSizeLeft();
public void setSize(int size) {
mCacheMaxSize = size;
}
public int getSize() {
return mCacheMaxSize;
}
}
private static class BackingCacheNone extends BackingCache {
@Override
public Backing fetchBacking(int mode, int access, int[] dimensions, int elemSize) {
return null;
}
@Override
public boolean cacheBacking(Backing backing) {
return false;
}
@Override
public void clear() {
}
@Override
public int getSize() {
return 0;
}
@Override
public int getSizeLeft() {
return 0;
}
}
private static abstract class PriorityBackingCache extends BackingCache {
private int mSize = 0;
private PriorityQueue<Backing> mQueue;
public PriorityBackingCache() {
mQueue = new PriorityQueue<Backing>(4, new Comparator<Backing>() {
@Override
public int compare(Backing left, Backing right) {
return left.cachePriority - right.cachePriority;
}
});
}
@Override
public Backing fetchBacking(int mode, int access, int[] dimensions, int elemSize) {
for (Backing backing : mQueue) {
int backingAccess = (mode == Frame.MODE_WRITE)
? backing.writeAccess()
: backing.readAccess();
if ((backingAccess & access) == access
&& dimensionsCompatible(backing.getDimensions(), dimensions)
&& (elemSize == backing.getElementSize())) {
mQueue.remove(backing);
mSize -= backing.getSize();
onFetchBacking(backing);
return backing;
}
}
//Log.w("FrameManager", "Could not find backing for dimensions " + Arrays.toString(dimensions));
return null;
}
@Override
public boolean cacheBacking(Backing backing) {
if (reserve(backing.getSize())) {
onCacheBacking(backing);
mQueue.add(backing);
return true;
}
return false;
}
@Override
public void clear() {
mQueue.clear();
mSize = 0;
}
@Override
public int getSizeLeft() {
return mCacheMaxSize - mSize;
}
protected abstract void onCacheBacking(Backing backing);
protected abstract void onFetchBacking(Backing backing);
private boolean reserve(int size) {
//Log.i("FM", "Reserving " + size + " bytes (max: " + mCacheMaxSize + " bytes).");
//Log.i("FM", "Current size " + mSize);
if (size > mCacheMaxSize) {
return false;
}
mSize += size;
while (mSize > mCacheMaxSize) {
Backing dropped = mQueue.poll();
mSize -= dropped.getSize();
//Log.i("FM", "Dropping " + dropped + " with priority "
// + dropped.cachePriority + ". New size: " + mSize + "!");
dropped.destroy();
}
return true;
}
}
private static class BackingCacheLru extends PriorityBackingCache {
private int mTimestamp = 0;
@Override
protected void onCacheBacking(Backing backing) {
backing.cachePriority = 0;
}
@Override
protected void onFetchBacking(Backing backing) {
++mTimestamp;
backing.cachePriority = mTimestamp;
}
}
private static class BackingCacheLfu extends PriorityBackingCache {
@Override
protected void onCacheBacking(Backing backing) {
backing.cachePriority = 0;
}
@Override
protected void onFetchBacking(Backing backing) {
++backing.cachePriority;
}
}
public static FrameManager current() {
GraphRunner runner = GraphRunner.current();
return runner != null ? runner.getFrameManager() : null;
}
/**
* Returns the context that the FrameManager is bound to.
*
* @return the MffContext instance that the FrameManager is bound to.
*/
public MffContext getContext() {
return mRunner.getContext();
}
/**
* Returns the GraphRunner that the FrameManager is bound to.
*
* @return the GraphRunner instance that the FrameManager is bound to.
*/
public GraphRunner getRunner() {
return mRunner;
}
/**
* Sets the size of the cache.
*
* Resizes the cache to the specified size in bytes.
*
* @param bytes the new size in bytes.
*/
public void setCacheSize(int bytes) {
mCache.setSize(bytes);
}
/**
* Returns the size of the cache.
*
* @return the size of the cache in bytes.
*/
public int getCacheSize() {
return mCache.getSize();
}
/**
* Imports a frame from another FrameManager.
*
* This will return a frame with the contents of the given frame for use in this FrameManager.
* Note, that there is a substantial cost involved in moving a Frame from one FrameManager to
* another. This may be called from any thread. After the frame has been imported, it may be
* used in the runner that uses this FrameManager. As the new frame may share data with the
* provided frame, that frame must be read-only.
*
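* <p>A minimal sketch (the read-only frame and the two managers here are illustrative
* assumptions):
* <pre>{@code
*   // 'readOnlyFrame' was produced by another runner and is already read-only.
*   Frame imported = otherManager.importFrame(readOnlyFrame);
* }</pre>
*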
* @param frame The frame to import
*/
public Frame importFrame(Frame frame) {
if (!frame.isReadOnly()) {
throw new IllegalArgumentException("Frame " + frame + " must be read-only to import "
+ "into another FrameManager!");
}
return frame.makeCpuCopy(this);
}
/**
* Adds a new frame slot to the frame manager.
* Filters can reference frame slots to pass frames between graphs or runs. If the name
* specified here is already taken, the frame slot is overwritten. You can only
* modify frame-slots while no graph of the frame manager is running.
*
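* <p>A minimal sketch of the slot workflow (the slot name, type and flag value are illustrative;
* storing and fetching must happen from within a graph run):
* <pre>{@code
*   manager.addFrameSlot("mySlot", FrameType.single(), 0);
*   // ... later, inside a graph run:
*   manager.storeFrame(someFrame, "mySlot");
*   Frame restored = manager.fetchFrame("mySlot");
* }</pre>
*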
* @param name The name of the slot.
* @param type The type of Frame that will be assigned to this slot.
* @param flags A mask of {@code SLOT} flags.
*/
public void addFrameSlot(String name, FrameType type, int flags) {
assertNotRunning();
FrameSlot oldSlot = mFrameSlots.get(name);
if (oldSlot != null) {
removeFrameSlot(name);
}
FrameSlot slot = new FrameSlot(type, flags);
mFrameSlots.put(name, slot);
}
/**
* Removes a frame slot from the frame manager.
* Any frame within the slot is released. You can only modify frame-slots while no graph
* of the frame manager is running.
*
* @param name The name of the slot
* @throws IllegalArgumentException if no such slot exists.
*/
public void removeFrameSlot(String name) {
assertNotRunning();
FrameSlot slot = getSlot(name);
slot.releaseFrame();
mFrameSlots.remove(name);
}
/**
* TODO: Document!
*/
public void storeFrame(Frame frame, String slotName) {
assertInGraphRun();
getSlot(slotName).assignFrame(frame);
}
/**
* TODO: Document!
*/
public Frame fetchFrame(String slotName) {
assertInGraphRun();
return getSlot(slotName).getFrame();
}
/**
* Clears the Frame cache.
*/
public void clearCache() {
mCache.clear();
}
/**
* Create a new FrameManager instance.
*
* Creates a new FrameManager instance bound to the given GraphRunner, employing a cache of the
* specified cache type (see the cache type constants defined by the FrameManager class).
*
* @param runner the GraphRunner to bind the FrameManager to.
* @param cacheType the type of cache to use.
*/
FrameManager(GraphRunner runner, int cacheType) {
mRunner = runner;
switch (cacheType) {
case FRAME_CACHE_NONE:
mCache = new BackingCacheNone();
break;
case FRAME_CACHE_LRU:
mCache = new BackingCacheLru();
break;
case FRAME_CACHE_LFU:
mCache = new BackingCacheLfu();
break;
default:
throw new IllegalArgumentException("Unknown cache-type " + cacheType + "!");
}
}
Backing fetchBacking(int mode, int access, int[] dimensions, int elemSize) {
return mCache.fetchBacking(mode, access, dimensions, elemSize);
}
void onBackingCreated(Backing backing) {
if (backing != null) {
mBackings.add(backing);
// Log.i("FrameManager", "RM: Now have " + mBackings.size() + " backings");
}
}
void onBackingAvailable(Backing backing) {
if (!backing.shouldCache() || !mCache.cacheBacking(backing)) {
backing.destroy();
mBackings.remove(backing);
//Log.i("FrameManager", "RM: Now have " + mBackings.size() + " backings (" + mCache.getSizeLeft() + ")");
}
}
/**
* Destroys all backings. Any Frames still referencing them become invalid.
*/
void destroyBackings() {
for (Backing backing : mBackings) {
backing.destroy();
}
mBackings.clear();
mCache.clear();
}
FrameSlot getSlot(String name) {
FrameSlot slot = mFrameSlots.get(name);
if (slot == null) {
throw new IllegalArgumentException("Unknown frame slot '" + name + "'!");
}
return slot;
}
void onBeginRun() {
for (FrameSlot slot : mFrameSlots.values()) {
slot.markWritable();
}
}
// Internals ///////////////////////////////////////////////////////////////////////////////////
private static boolean dimensionsCompatible(int[] dimA, int[] dimB) {
return dimA == null || dimB == null || Arrays.equals(dimA, dimB);
}
private void assertNotRunning() {
if (mRunner.isRunning()) {
throw new IllegalStateException("Attempting to modify FrameManager while graph is "
+ "running!");
}
}
private void assertInGraphRun() {
if (!mRunner.isRunning() || GraphRunner.current() != mRunner) {
throw new IllegalStateException("Attempting to access FrameManager Frame data "
+ "outside of graph run-loop!");
}
}
}

View File

@ -0,0 +1,184 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package androidx.media.filterfw;
import java.util.Vector;
class FrameQueue {
public static class Builder {
private FrameType mReadType = null;
private FrameType mWriteType = null;
private Vector<FrameQueue> mAttachedQueues = new Vector<FrameQueue>();
public Builder() {}
public void setWriteType(FrameType type) {
mWriteType = type;
}
public void setReadType(FrameType type) {
mReadType = type;
}
public void attachQueue(FrameQueue queue) {
mAttachedQueues.add(queue);
}
public FrameQueue build(String name) {
FrameType type = buildType();
// TODO: This currently does not work correctly (Try camera -> branch -> target-slot)
//validateType(type, name);
FrameQueue result = new FrameQueue(type, name);
buildQueueImpl(result);
return result;
}
private void buildQueueImpl(FrameQueue queue) {
QueueImpl queueImpl = queue.new SingleFrameQueueImpl();
queue.mQueueImpl = queueImpl;
}
private FrameType buildType() {
FrameType result = FrameType.merge(mWriteType, mReadType);
for (FrameQueue queue : mAttachedQueues) {
result = FrameType.merge(result, queue.mType);
}
return result;
}
/*
private void validateType(FrameType type, String queueName) {
if (!type.isSpecified()) {
throw new RuntimeException("Cannot build connection queue '" + queueName + "' as "
+ "its type (" + type + ") is underspecified!");
}
}
*/
}
private interface QueueImpl {
public boolean canPull();
public boolean canPush();
public Frame pullFrame();
public Frame fetchAvailableFrame(int[] dimensions);
public Frame peek();
public void pushFrame(Frame frame);
public void clear();
}
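// A QueueImpl holding at most a single frame: pushing is only possible while the queue is
// empty, and pulling empties it again.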
private class SingleFrameQueueImpl implements QueueImpl {
private Frame mFrame = null;
@Override
public boolean canPull() {
return mFrame != null;
}
@Override
public boolean canPush() {
return mFrame == null;
}
@Override
public Frame pullFrame() {
Frame result = mFrame;
mFrame = null;
return result;
}
@Override
public Frame peek() {
return mFrame;
}
@Override
public Frame fetchAvailableFrame(int[] dimensions) {
// Note that we cannot use a cached frame here, as we do not know where that cached
// instance would end up.
FrameManager manager = FrameManager.current();
return new Frame(mType, dimensions, manager);
}
@Override
public void pushFrame(Frame frame) {
mFrame = frame.retain();
mFrame.setReadOnly(true);
}
@Override
public void clear() {
if (mFrame != null) {
mFrame.release();
mFrame = null;
}
}
}
private QueueImpl mQueueImpl;
private FrameType mType;
private String mName;
public FrameType getType() {
return mType;
}
public boolean canPull() {
return mQueueImpl.canPull();
}
public boolean canPush() {
return mQueueImpl.canPush();
}
public Frame pullFrame() {
return mQueueImpl.pullFrame();
}
public Frame fetchAvailableFrame(int[] dimensions) {
return mQueueImpl.fetchAvailableFrame(dimensions);
}
public void pushFrame(Frame frame) {
mQueueImpl.pushFrame(frame);
}
public Frame peek() {
return mQueueImpl.peek();
}
@Override
public String toString() {
return mName;
}
public void clear() {
mQueueImpl.clear();
}
private FrameQueue(FrameType type, String name) {
mType = type;
mName = name;
}
}

View File

@ -0,0 +1,49 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.base;
import androidx.media.filterfw.*;
public final class FrameSlotSource extends SlotFilter {
public FrameSlotSource(MffContext context, String name, String slotName) {
super(context, name, slotName);
}
@Override
public Signature getSignature() {
// TODO: It would be nice if we could return the slot type here. Not currently possible
// as getSignature() is typically called before a FrameManager and its slots are setup.
return new Signature()
.addOutputPort("frame", Signature.PORT_REQUIRED, FrameType.any())
.disallowOtherPorts();
}
@Override
protected boolean canSchedule() {
return super.canSchedule() && slotHasFrame();
}
@Override
protected void onProcess() {
Frame frame = getFrameManager().fetchFrame(mSlotName);
getConnectedOutputPort("frame").pushFrame(frame);
frame.release();
}
}

View File

@ -0,0 +1,43 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.base;
import androidx.media.filterfw.*;
public final class FrameSlotTarget extends SlotFilter {
public FrameSlotTarget(MffContext context, String name, String slotName) {
super(context, name, slotName);
}
@Override
public Signature getSignature() {
// TODO: It would be nice if we could return the slot type here. Not currently possible
// as getSignature() is typically called before a FrameManager and its slots are setup.
return new Signature()
.addInputPort("frame", Signature.PORT_REQUIRED, FrameType.any())
.disallowOtherPorts();
}
@Override
protected void onProcess() {
Frame frame = getConnectedInputPort("frame").pullFrame();
getFrameManager().storeFrame(frame, mSlotName);
}
}

View File

@ -0,0 +1,430 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package androidx.media.filterfw;
/**
* A FrameType instance specifies the data format of a Frame.
*
* FrameTypes are used mainly by Filters to specify the data type they intend to consume or produce.
* When filters are connected, their FrameType information is analyzed and checked for
* compatibility. This allows Filter writers to assume a certain data input type. It also helps
* filter-graph designers determine which filters can be hooked up to one another.
*
* A FrameType generally consists of an element type and number of dimensions. The currently
* supported element types are:
*
* <ul>
* <li>int8, int16, int32, int64</li>
* <li>float32, float64</li>
* <li>rgba8888</li>
* <li>object</li>
* <li>don't-care</li>
* </ul>
*
* If the object element type is used, class information may be appended to the FrameType to
* indicate what class of objects are expected. When constructing an object based FrameType, you
* have the option of either specifying a type that represents a single object of that class, or
* an array of objects (see the {@link #single()} and {@link #array()} constructors). A single
* object has a dimensionality of 0, while an array has a dimensionality of 1.
*
* When constructing a non-object type, you have the option of creating a 1D or 2D buffer, or
* a 2D image (see the {@link #buffer1D(int)}, {@link #buffer2D(int)}, and
* {@link #image2D(int, int)} constructors). To optimize access, provide access hints when making
* an image type.
*
* Finally, it is possible to create a wild-card type with the {@link #any()} constructor. This
* type matches any other type. Note that this is a more general type than a {@code single(Object)}
* type, which matches only object-based types (of any Object subclass). You may also leave the
* element type unspecified by using the {@code ELEMENT_DONTCARE} constant.
*
* When a graph is connected the types between outputs and inputs are merged to a queue-type. All
* Frames in this queue will be of that type. In order for a merge to succeed the following
* conditions must hold:
*
* <ul>
* <li>The element types must be identical.</li>
* <li>The dimensions must match (except for singles and arrays, see below).</li>
* <li>For object-based types: The classes must be compatible.</li>
* <li>If one of the types is a wild-card, both types are always compatible.</li>
* </ul>
*
* Class compatibility is determined in an optimistic fashion, i.e. one class must be the subclass
* of the other. It does not matter which of the types is the subclass of the other. For instance,
* if one Filter outputs a type of class {@code Object}, and the consumer expects a Filter of type
* {@code Bitmap}, the connection is considered compatible. (Of course if at runtime a non-Bitmap
* object is produced, this will cause a runtime exception to be thrown).
*
* For convenience, single and array object-based types are compatible with one another. This
* in turn means that Frames with a single object can be accessed as an array with a single entry,
* and array based Frames can be accessed as a single object of the array class. For this reason
* you should prefer consuming objects as array types (if it makes sense for that specific port),
* as this will allow your Filter to handle multiple objects in one Frame while not giving up the
* possibility to deal with singles.
* TODO: This needs to be reworked. An array(int) should not be interchangeable with a single(int),
* but rather with a single(int[]). Use ArraySelectFilter for the former!
*
* After the types are merged, the queue-type must be a fully specified type. This means that the
* type must have its element and dimensions specified. This ensures that filters that need to
* query their input or output types receive meaningful information.
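*
* <p>A few construction sketches (the access hint and the object class chosen here are
* illustrative):
* <pre>{@code
*   FrameType image  = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
*   FrameType floats = FrameType.buffer1D(FrameType.ELEMENT_FLOAT32);
*   FrameType string = FrameType.single(String.class);
*   FrameType anyType = FrameType.any();
* }</pre>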
*/
public final class FrameType {
public final static int ELEMENT_DONTCARE = 0;
public final static int ELEMENT_OBJECT = 1;
public final static int ELEMENT_INT8 = 100;
public final static int ELEMENT_INT16 = 101;
public final static int ELEMENT_INT32 = 102;
public final static int ELEMENT_INT64 = 103;
public final static int ELEMENT_FLOAT32 = 200;
public final static int ELEMENT_FLOAT64 = 201;
public final static int ELEMENT_RGBA8888 = 301;
public final static int READ_CPU = 0x01;
public final static int READ_GPU = 0x02;
public final static int READ_ALLOCATION = 0x04;
public final static int WRITE_CPU = 0x08;
public final static int WRITE_GPU = 0x10;
public final static int WRITE_ALLOCATION = 0x20;
private final static int ACCESS_UNKNOWN = 0x00;
private final int mElementId;
private final int mDimensions;
private final int mAccessHints;
private final Class<?> mClass;
private static SimpleCache<String, FrameType> mTypeCache =
new SimpleCache<String, FrameType>(64);
/**
* Constructs a wild-card FrameType that matches any other FrameType.
* @return The wild-card FrameType instance.
*/
public static FrameType any() {
return FrameType.fetchType(ELEMENT_DONTCARE, -1, ACCESS_UNKNOWN);
}
/**
* Constructs an object-based single FrameType that matches object-based FrameTypes of any
* class.
* @return A single object-based FrameType instance.
*/
public static FrameType single() {
return FrameType.fetchType(null, 0);
}
/**
* Constructs an object-based single FrameType of the specified class.
* @param clazz The class of the FrameType.
* @return A single object-based FrameType instance of the specified class.
*/
public static FrameType single(Class<?> clazz) {
return FrameType.fetchType(clazz, 0);
}
/**
* Constructs an object-based array FrameType that matches object-based FrameTypes of any class.
* @return An array object-based FrameType instance.
*/
public static FrameType array() {
return FrameType.fetchType(null, 1);
}
/**
* Constructs an object-based array FrameType with elements of the specified class.
* @param clazz The class of the array elements (not the array type).
* @return An array object-based FrameType instance of the specified class.
*/
public static FrameType array(Class<?> clazz) {
return FrameType.fetchType(clazz, 1);
}
/**
* Constructs a one-dimensional buffer type of the specified element.
* @param elementType One of the {@code ELEMENT} constants.
* @return A 1D buffer FrameType instance.
*/
public static FrameType buffer1D(int elementType) {
return FrameType.fetchType(elementType, 1, ACCESS_UNKNOWN);
}
/**
* Constructs a two-dimensional buffer type of the specified element.
* @param elementType One of the {@code ELEMENT} constants.
* @return A 2D buffer FrameType instance.
*/
public static FrameType buffer2D(int elementType) {
return FrameType.fetchType(elementType, 2, ACCESS_UNKNOWN);
}
/**
* Constructs a two-dimensional image type of the specified element.
* @param elementType One of the {@code ELEMENT} constants.
* @param accessHint A bit-mask of access flags (see {@code READ} and {@code WRITE} constants).
* @return A 2D image FrameType instance.
*/
public static FrameType image2D(int elementType, int accessHint) {
return FrameType.fetchType(elementType, 2, accessHint);
}
/**
* Converts the current array type to a single type.
* The type must be an object-based type. If the type is already a single type, this does
* nothing.
* @return type as a single type.
*/
public FrameType asSingle() {
if (mElementId != ELEMENT_OBJECT) {
throw new RuntimeException("Calling asSingle() on non-object type!");
}
return FrameType.fetchType(mClass, 0);
}
/**
* Converts the current single type to an array type.
* The type must be an object-based type. If the type is already an array type, this does
* nothing.
* @return type as an array type.
*/
public FrameType asArray() {
if (mElementId != ELEMENT_OBJECT) {
throw new RuntimeException("Calling asArray() on non-object type!");
}
return FrameType.fetchType(mClass, 1);
}
/**
* Returns the FrameType's class specifier, or null if no class was set or the receiver is not
* an object-based type.
* @return The FrameType's class specifier or null.
*/
public Class<?> getContentClass() {
return mClass;
}
/**
* Returns the FrameType's element id.
* @return The element id constant.
*/
public int getElementId() {
return mElementId;
}
/**
* Returns the number of bytes of the FrameType's element, or 0 if no such size can be
* determined.
* @return The number of bytes of the FrameType's element.
*/
public int getElementSize() {
switch (mElementId) {
case ELEMENT_INT8:
return 1;
case ELEMENT_INT16:
return 2;
case ELEMENT_INT32:
case ELEMENT_FLOAT32:
case ELEMENT_RGBA8888:
return 4;
case ELEMENT_INT64:
case ELEMENT_FLOAT64:
return 8;
default:
return 0;
}
}
/**
* Returns the access hints bit-mask of the FrameType.
* @return The access hints bit-mask of the FrameType.
*/
public int getAccessHints() {
return mAccessHints;
}
/**
* Returns the number of dimensions of the FrameType or -1 if no dimensions were set.
* @return The number of dimensions of the FrameType.
*/
public int getNumberOfDimensions() {
return mDimensions;
}
/**
* Returns true, if the FrameType is fully specified.
*
* A FrameType is fully specified if its element and dimensions are specified.
*
* @return true, if the FrameType is fully specified.
*/
public boolean isSpecified() {
return mElementId != ELEMENT_DONTCARE && mDimensions >= 0;
}
@Override
public boolean equals(Object object) {
if (object instanceof FrameType) {
FrameType type = (FrameType) object;
return mElementId == type.mElementId && mDimensions == type.mDimensions
&& mAccessHints == type.mAccessHints && mClass == type.mClass;
}
return false;
}
@Override
public int hashCode() {
return mElementId ^ mDimensions ^ mAccessHints ^ (mClass != null ? mClass.hashCode() : 0);
}
@Override
public String toString() {
String result = elementToString(mElementId, mClass) + "[" + mDimensions + "]";
if ((mAccessHints & READ_CPU) != 0) {
result += "(rcpu)";
}
if ((mAccessHints & READ_GPU) != 0) {
result += "(rgpu)";
}
if ((mAccessHints & READ_ALLOCATION) != 0) {
result += "(ralloc)";
}
if ((mAccessHints & WRITE_CPU) != 0) {
result += "(wcpu)";
}
if ((mAccessHints & WRITE_GPU) != 0) {
result += "(wgpu)";
}
if ((mAccessHints & WRITE_ALLOCATION) != 0) {
result += "(walloc)";
}
return result;
}
String keyString() {
return keyValueForType(mElementId, mDimensions, mAccessHints, mClass);
}
static FrameType tryMerge(FrameType writer, FrameType reader) {
if (writer.mElementId == ELEMENT_DONTCARE) {
return reader;
} else if (reader.mElementId == ELEMENT_DONTCARE) {
return writer;
} else if (writer.mElementId == ELEMENT_OBJECT && reader.mElementId == ELEMENT_OBJECT) {
return tryMergeObjectTypes(writer, reader);
} else if (writer.mDimensions > 0 && writer.mElementId == reader.mElementId) {
return tryMergeBuffers(writer, reader);
} else {
return null;
}
}
static FrameType tryMergeObjectTypes(FrameType writer, FrameType reader) {
int dimensions = Math.max(writer.mDimensions, reader.mDimensions);
Class<?> mergedClass = mergeClasses(writer.mClass, reader.mClass);
boolean success = mergedClass != null || writer.mClass == null;
return success ? FrameType.fetchType(mergedClass, dimensions) : null;
}
static FrameType tryMergeBuffers(FrameType writer, FrameType reader) {
if (writer.mDimensions == reader.mDimensions) {
int accessHints = writer.mAccessHints | reader.mAccessHints;
return FrameType.fetchType(writer.mElementId, writer.mDimensions, accessHints);
}
return null;
}
static FrameType merge(FrameType writer, FrameType reader) {
FrameType result = tryMerge(writer, reader);
if (result == null) {
throw new RuntimeException(
"Incompatible types in connection: " + writer + " vs. " + reader + "!");
}
return result;
}
private static String keyValueForType(int elemId, int dims, int hints, Class<?> clazz) {
return elemId + ":" + dims + ":" + hints + ":" + (clazz != null ? clazz.getName() : "0");
}
private static String elementToString(int elemId, Class<?> clazz) {
switch (elemId) {
case ELEMENT_INT8:
return "int8";
case ELEMENT_INT16:
return "int16";
case ELEMENT_INT32:
return "int32";
case ELEMENT_INT64:
return "int64";
case ELEMENT_FLOAT32:
return "float32";
case ELEMENT_FLOAT64:
return "float64";
case ELEMENT_RGBA8888:
return "rgba8888";
case ELEMENT_OBJECT:
return "<" + (clazz == null ? "*" : clazz.getSimpleName()) + ">";
case ELEMENT_DONTCARE:
return "*";
default:
return "?";
}
}
private static Class<?> mergeClasses(Class<?> classA, Class<?> classB) {
// Return the most specialized class.
if (classA == null) {
return classB;
} else if (classB == null) {
return classA;
} else if (classA.isAssignableFrom(classB)) {
return classB;
} else if (classB.isAssignableFrom(classA)) {
return classA;
} else {
return null;
}
}
private static FrameType fetchType(int elementId, int dimensions, int accessHints) {
return fetchType(elementId, dimensions, accessHints, null);
}
private static FrameType fetchType(Class<?> clazz, int dimensions) {
return fetchType(ELEMENT_OBJECT, dimensions, ACCESS_UNKNOWN, clazz);
}
private static FrameType fetchType(
int elementId, int dimensions, int accessHints, Class<?> clazz) {
String typeKey = FrameType.keyValueForType(elementId, dimensions, accessHints, clazz);
FrameType type = mTypeCache.get(typeKey);
if (type == null) {
type = new FrameType(elementId, dimensions, accessHints, clazz);
mTypeCache.put(typeKey, type);
}
return type;
}
private FrameType(int elementId, int dimensions, int accessHints, Class<?> clazz) {
mElementId = elementId;
mDimensions = dimensions;
mClass = clazz;
mAccessHints = accessHints;
}
}

View File

@ -0,0 +1,50 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import androidx.media.filterfw.BackingStore.Backing;
public class FrameValue extends Frame {
public Object getValue() {
Object result = mBackingStore.lockData(MODE_READ, BackingStore.ACCESS_OBJECT);
mBackingStore.unlock();
return result;
}
public void setValue(Object value) {
Backing backing = mBackingStore.lockBacking(MODE_WRITE, BackingStore.ACCESS_OBJECT);
backing.setData(value);
mBackingStore.unlock();
}
static FrameValue create(BackingStore backingStore) {
assertObjectBased(backingStore.getFrameType());
return new FrameValue(backingStore);
}
FrameValue(BackingStore backingStore) {
super(backingStore);
}
static void assertObjectBased(FrameType type) {
if (type.getElementId() != FrameType.ELEMENT_OBJECT) {
throw new RuntimeException("Cannot access non-object based Frame as FrameValue!");
}
}
}

View File

@ -0,0 +1,155 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import java.lang.reflect.Array;
public class FrameValues extends FrameValue {
/**
* Returns the number of values in the Frame.
*
* This returns 1 if the Frame value is null or if the value is not an array.
*
* @return The number of values in the Frame.
*/
public int getCount() {
Object value = super.getValue();
if (value == null || !value.getClass().isArray()) {
return 1;
} else {
return Array.getLength(super.getValue());
}
}
/**
* Returns the values in the Frame as an array.
*
* Note that this may be called on Frames that have a non-array object assigned to them. In
* that case, this method will wrap the object in an array and return that. This way, filters
* can treat any object-based frame as an array.
*
* @return The array of values in this frame.
*/
public Object getValues() {
Object value = super.getValue();
if (value == null || value.getClass().isArray()) {
return super.getValue();
} else {
// Allow reading a single as an array.
Object[] array = (Object[])Array.newInstance(value.getClass(), 1);
array[0] = value;
return array;
}
}
/**
* Returns the value at the specified index.
*
* In case the value is null or not an array, the index must be 0, and the value itself is
* returned.
*
* @param index The index to access.
* @return The value at that index.
*/
public Object getValueAtIndex(int index) {
Object value = super.getValue();
if (value == null || !value.getClass().isArray()) {
if (index != 0) {
throw new ArrayIndexOutOfBoundsException(index);
} else {
return value;
}
} else {
return Array.get(value, index);
}
}
/**
* Returns the value as a FrameValue at the specified index.
*
* Use this if you want to access elements as FrameValues. You must release the result when
* you are done using it.
*
* @param index The index to access.
* @return The value as a FrameValue at that index (must release).
*/
public FrameValue getFrameValueAtIndex(int index) {
Object value = getValueAtIndex(index);
FrameValue result = Frame.create(getType().asSingle(), new int[0]).asFrameValue();
result.setValue(value);
return result;
}
/**
* Assign the array of values to the frame.
*
* You may assign null or a non-array object, which is interpreted as a 1-length array.
*
* @param values The values to assign to the frame.
*/
public void setValues(Object values) {
super.setValue(values);
}
/**
* Assign a value at the specified index.
*
* In case the held value is not an array, the index must be 0, and the object will be replaced
* by the new object.
*
* @param value The value to assign.
* @param index The index to assign to.
*/
public void setValueAtIndex(Object value, int index) {
super.assertAccessible(MODE_WRITE);
Object curValue = super.getValue();
if (curValue == null || !curValue.getClass().isArray()) {
if (index != 0) {
throw new ArrayIndexOutOfBoundsException(index);
} else {
// No array to index into; replace the held value itself.
super.setValue(value);
}
} else {
Array.set(curValue, index, value);
}
}
/**
* Assign a FrameValue's value at the specified index.
*
* This method unpacks the FrameValue and assigns the unpacked value to the specified index.
* This does not affect the retain-count of the passed Frame.
*
* @param frame The frame value to assign.
* @param index The index to assign to.
*/
public void setFrameValueAtIndex(FrameValue frame, int index) {
Object value = frame.getValue();
setValueAtIndex(value, index);
}
static FrameValues create(BackingStore backingStore) {
assertObjectBased(backingStore.getFrameType());
return new FrameValues(backingStore);
}
FrameValues(BackingStore backingStore) {
super(backingStore);
}
}

View File

@ -0,0 +1,194 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.os.Looper;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* TODO: Make this package-private as RenderTarget and TextureSource should suffice as public
* facing OpenGL utilities.
* @hide
*/
public class GLToolbox {
public static int textureNone() {
return 0;
}
public static boolean isTexture(int texId) {
return GLES20.glIsTexture(texId);
}
public static void deleteTexture(int texId) {
int[] textures = new int[] { texId };
assertNonUiThread("glDeleteTextures");
GLES20.glDeleteTextures(1, textures, 0);
checkGlError("glDeleteTextures");
}
public static void deleteFbo(int fboId) {
int[] fbos = new int[] { fboId };
assertNonUiThread("glDeleteFramebuffers");
GLES20.glDeleteFramebuffers(1, fbos, 0);
checkGlError("glDeleteFramebuffers");
}
public static int generateTexture() {
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
checkGlError("glGenTextures");
return textures[0];
}
public static int generateFbo() {
int[] fbos = new int[1];
GLES20.glGenFramebuffers(1, fbos, 0);
checkGlError("glGenFramebuffers");
return fbos[0];
}
public static void readFbo(int fboId, ByteBuffer pixels, int width, int height) {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
checkGlError("glReadPixels");
}
public static void readTarget(RenderTarget target, ByteBuffer pixels, int width, int height) {
target.focus();
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
checkGlError("glReadPixels");
}
public static int attachedTexture(int fboId) {
int[] params = new int[1];
GLES20.glGetFramebufferAttachmentParameteriv(
GLES20.GL_FRAMEBUFFER,
GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME,
params, 0);
checkGlError("glGetFramebufferAttachmentParameteriv");
return params[0];
}
public static void attachTextureToFbo(int texId, int fboId) {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D,
texId,
0);
checkGlError("glFramebufferTexture2D");
}
public static void allocateTexturePixels(int texId, int target, int width, int height) {
setTexturePixels(texId, target, (ByteBuffer)null, width, height);
}
public static void setTexturePixels(int texId, int target, Bitmap bitmap) {
GLES20.glBindTexture(target, texId);
GLUtils.texImage2D(target, 0, bitmap, 0);
checkGlError("glTexImage2D");
setDefaultTexParams();
}
public static void setTexturePixels(int texId, int target, ByteBuffer pixels,
int width, int height) {
GLES20.glBindTexture(target, texId);
// For some devices, "pixels" being null causes system error.
if (pixels == null) {
pixels = ByteBuffer.allocateDirect(width * height * 4);
}
GLES20.glTexImage2D(target, 0, GLES20.GL_RGBA, width, height, 0,
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
checkGlError("glTexImage2D");
setDefaultTexParams();
}
public static void setDefaultTexParams() {
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
checkGlError("glTexParameteri");
}
public static int vboNone() {
return 0;
}
public static int generateVbo() {
int[] vbos = new int[1];
GLES20.glGenBuffers(1, vbos, 0);
checkGlError("glGenBuffers");
return vbos[0];
}
public static void setVboData(int vboId, ByteBuffer data) {
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, data.remaining(), data, GLES20.GL_STATIC_DRAW);
checkGlError("glBufferData");
}
public static void setVboFloats(int vboId, float[] values) {
int len = values.length * 4;
ByteBuffer buffer = ByteBuffer.allocateDirect(len).order(ByteOrder.nativeOrder());
// Copy the float values into the direct buffer before uploading; otherwise the VBO
// would be filled with uninitialized data.
buffer.asFloatBuffer().put(values);
setVboData(vboId, buffer);
}
public static boolean isVbo(int vboId) {
return GLES20.glIsBuffer(vboId);
}
public static void deleteVbo(int vboId) {
int[] buffers = new int[] { vboId };
GLES20.glDeleteBuffers(1, buffers, 0);
checkGlError("glDeleteBuffers");
}
public static void checkGlError(String operation) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
throw new RuntimeException("GL Operation '" + operation + "' caused error "
+ Integer.toHexString(error) + "!");
}
}
/**
* Make sure we are not operating in the UI thread.
*
* It is often tricky to track down bugs that happen when issuing GL commands in the UI thread.
* This is especially true when releasing GL resources. Often this will cause errors much later
* on. Therefore we make sure we do not do these dangerous operations on the UI thread.
*/
private static void assertNonUiThread(String operation) {
if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
throw new RuntimeException("Attempting to perform GL operation '" + operation
+ "' on UI thread!");
}
}
}

View File

@ -0,0 +1,199 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This class provides functions to export a FilterGraph.
package androidx.media.filterfw;
import android.content.Context;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map.Entry;
import java.util.Set;
/**
* This class provides functions to export a FilterGraph as a DOT file.
*/
public class GraphExporter {
/**
* Exports the graph as DOT (see http://en.wikipedia.org/wiki/DOT_language).
* Using the exported file, the graph can be visualized e.g. with the command line tool dot.
* Optionally, one may /exclude/ unconnected optional ports (third parameter = false),
* since they can quickly clutter the visualization (and, depending on the purpose, may not
* be interesting).
*
* Example workflow:
* 1. run application on device, make sure it calls exportAsDot(...);
* 2. adb pull /data/data/<application name>/files/<graph filename>.gv graph.gv
* 3. dot -Tpng graph.gv -o graph.png
* 4. eog graph.png
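*
* A minimal call sketch (the graph instance and file name are illustrative):
* <pre>{@code
*   GraphExporter.exportAsDot(graph, "graph.gv", false); // omit unconnected optional ports
* }</pre>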
*/
static public void exportAsDot(FilterGraph graph, String filename,
boolean includeUnconnectedOptionalPorts)
throws java.io.FileNotFoundException, java.io.IOException {
// Initialize, open file stream
Context myAppContext = graph.getContext().getApplicationContext();
Filter[] filters = graph.getAllFilters();
FileOutputStream fOut = myAppContext.openFileOutput(filename, Context.MODE_PRIVATE);
OutputStreamWriter dotFile = new OutputStreamWriter(fOut);
// Write beginning of DOT file
dotFile.write("digraph graphname {\n");
dotFile.write(" node [shape=record];\n");
// N.B. For specification and lots of examples of the DOT language, see
// http://www.graphviz.org/Documentation/dotguide.pdf
// Iterate over all filters of the graph, write corresponding DOT node elements
for(Filter filter : filters) {
dotFile.write(getDotName(" " + filter.getName()) + " [label=\"{");
// Write upper part of element (i.e., input ports)
Set<String> inputPorts = getInputPorts(filter, includeUnconnectedOptionalPorts);
if(inputPorts.size() > 0) {
dotFile.write(" { ");
int counter = 0;
for(String p : inputPorts) {
dotFile.write("<" + getDotName(p) + "_IN>" + p);
if(++counter != inputPorts.size()) dotFile.write(" | ");
}
dotFile.write(" } | ");
}
// Write center part of element (i.e., element label)
dotFile.write(filter.getName());
// Write lower part of element (i.e., output ports)
Set<String> outputPorts = getOutputPorts(filter, includeUnconnectedOptionalPorts);
if(outputPorts.size() > 0) {
dotFile.write(" | { ");
int counter = 0;
for(String p : outputPorts) {
dotFile.write("<" + getDotName(p) + "_OUT>" + p);
if(++counter != outputPorts.size()) dotFile.write(" | ");
}
dotFile.write(" } ");
}
dotFile.write("}\"];\n");
}
dotFile.write("\n");
// Iterate over all filters again to collect connections and find unconnected ports
int dummyNodeCounter = 0;
for(Filter filter : filters) {
Set<String> outputPorts = getOutputPorts(filter, includeUnconnectedOptionalPorts);
for(String portName : outputPorts) {
OutputPort source = filter.getConnectedOutputPort(portName);
if(source != null) {
// Found a connection, draw it
InputPort target = source.getTarget();
dotFile.write(" " +
getDotName(source.getFilter().getName()) + ":" +
getDotName(source.getName()) + "_OUT -> " +
getDotName(target.getFilter().getName()) + ":" +
getDotName(target.getName()) + "_IN;\n" );
} else {
// Found an unconnected output port, add dummy node
String color = filter.getSignature().getOutputPortInfo(portName).isRequired()
? "red" : "blue"; // red for unconnected, required ports
dotFile.write(" " +
"dummy" + (++dummyNodeCounter) +
" [shape=point,label=\"\",color=" + color + "];\n" +
" " + getDotName(filter.getName()) + ":" +
getDotName(portName) + "_OUT -> " +
"dummy" + dummyNodeCounter + " [color=" + color + "];\n");
}
}
Set<String> inputPorts = getInputPorts(filter, includeUnconnectedOptionalPorts);
for(String portName : inputPorts) {
InputPort target = filter.getConnectedInputPort(portName);
if(target != null) {
// Found a connection -- nothing to do, connections have been written out above
} else {
// Found an unconnected input port, add dummy node
String color = filter.getSignature().getInputPortInfo(portName).isRequired()
? "red" : "blue"; // red for unconnected, required ports
dotFile.write(" " +
"dummy" + (++dummyNodeCounter) +
" [shape=point,label=\"\",color=" + color + "];\n" +
" dummy" + dummyNodeCounter + " -> " +
getDotName(filter.getName()) + ":" +
getDotName(portName) + "_IN [color=" + color + "];\n");
}
}
}
// Write end of DOT file, close file stream
dotFile.write("}\n");
dotFile.flush();
dotFile.close();
}
// Internal methods
// From element's name in XML, create DOT-allowed element name
static private String getDotName(String raw) {
return raw.replaceAll("\\.", "___"); // DOT does not allow . in element names
}
// Retrieve all input ports of a filter, including:
// unconnected ports (which can not be retrieved from the filter, only from the signature), and
// additional (connected) ports not listed in the signature (which is allowed by default,
// unless disallowOtherInputs is defined in signature).
// With second parameter = false, *omit* unconnected optional ports.
static private Set<String> getInputPorts(Filter filter, boolean includeUnconnectedOptional) {
// add (connected) ports from filter
Set<String> ports = new HashSet<String>();
ports.addAll(filter.getConnectedInputPortMap().keySet());
// add (unconnected) ports from signature
HashMap<String, Signature.PortInfo> signaturePorts = filter.getSignature().getInputPorts();
if(signaturePorts != null){
for(Entry<String, Signature.PortInfo> e : signaturePorts.entrySet()) {
if(includeUnconnectedOptional || e.getValue().isRequired()) {
ports.add(e.getKey());
}
}
}
return ports;
}
// Retrieve all output ports of a filter (analogous to above function)
static private Set<String> getOutputPorts(Filter filter, boolean includeUnconnectedOptional) {
// add (connected) ports from filter
Set<String> ports = new HashSet<String>();
ports.addAll(filter.getConnectedOutputPortMap().keySet());
// add (unconnected) ports from signature
HashMap<String, Signature.PortInfo> signaturePorts = filter.getSignature().getOutputPorts();
if(signaturePorts != null){
for(Entry<String, Signature.PortInfo> e : signaturePorts.entrySet()) {
if(includeUnconnectedOptional || e.getValue().isRequired()) {
ports.add(e.getKey());
}
}
}
return ports;
}
}

View File

@ -0,0 +1,58 @@
// Copyright 2012 Google Inc. All Rights Reserved.
package androidx.media.filterpacks.base;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.Frame;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.Signature;
public class GraphInputSource extends Filter {
private Frame mFrame = null;
public GraphInputSource(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
return new Signature()
.addOutputPort("frame", Signature.PORT_REQUIRED, FrameType.any())
.disallowOtherInputs();
}
public void pushFrame(Frame frame) {
if (mFrame != null) {
mFrame.release();
}
if (frame == null) {
throw new RuntimeException("Attempting to assign null-frame!");
}
mFrame = frame.retain();
}
@Override
protected void onProcess() {
if (mFrame != null) {
getConnectedOutputPort("frame").pushFrame(mFrame);
mFrame.release();
mFrame = null;
}
}
@Override
protected void onTearDown() {
if (mFrame != null) {
mFrame.release();
mFrame = null;
}
}
@Override
protected boolean canSchedule() {
return super.canSchedule() && mFrame != null;
}
}

View File

@ -0,0 +1,60 @@
// Copyright 2012 Google Inc. All Rights Reserved.
package androidx.media.filterpacks.base;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.Frame;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.Signature;
public class GraphOutputTarget extends Filter {
private Frame mFrame = null;
private FrameType mType = FrameType.any();
public GraphOutputTarget(MffContext context, String name) {
super(context, name);
}
// TODO: During initialization only?
public void setType(FrameType type) {
mType = type;
}
public FrameType getType() {
return mType;
}
@Override
public Signature getSignature() {
return new Signature()
.addInputPort("frame", Signature.PORT_REQUIRED, mType)
.disallowOtherInputs();
}
// Returns a retained frame!
public Frame pullFrame() {
Frame result = null;
if (mFrame != null) {
result = mFrame;
mFrame = null;
}
return result;
}
@Override
protected void onProcess() {
Frame frame = getConnectedInputPort("frame").pullFrame();
if (mFrame != null) {
mFrame.release();
}
mFrame = frame.retain();
}
@Override
protected boolean canSchedule() {
return super.canSchedule() && mFrame == null;
}
}

View File

@ -0,0 +1,576 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import android.text.TextUtils;
import java.io.InputStream;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
/**
* A GraphReader allows obtaining filter graphs from XML graph files or strings.
*/
public class GraphReader {
private static interface Command {
public void execute(CommandStack stack);
}
private static class CommandStack {
private ArrayList<Command> mCommands = new ArrayList<Command>();
private FilterGraph.Builder mBuilder;
private FilterFactory mFactory;
private MffContext mContext;
public CommandStack(MffContext context) {
mContext = context;
mBuilder = new FilterGraph.Builder(mContext);
mFactory = new FilterFactory();
}
public void execute() {
for (Command command : mCommands) {
command.execute(this);
}
}
public void append(Command command) {
mCommands.add(command);
}
public FilterFactory getFactory() {
return mFactory;
}
public MffContext getContext() {
return mContext;
}
protected FilterGraph.Builder getBuilder() {
return mBuilder;
}
}
private static class ImportPackageCommand implements Command {
private String mPackageName;
public ImportPackageCommand(String packageName) {
mPackageName = packageName;
}
@Override
public void execute(CommandStack stack) {
try {
stack.getFactory().addPackage(mPackageName);
} catch (IllegalArgumentException e) {
throw new RuntimeException(e.getMessage());
}
}
}
private static class AddLibraryCommand implements Command {
private String mLibraryName;
public AddLibraryCommand(String libraryName) {
mLibraryName = libraryName;
}
@Override
public void execute(CommandStack stack) {
FilterFactory.addFilterLibrary(mLibraryName);
}
}
private static class AllocateFilterCommand implements Command {
private String mClassName;
private String mFilterName;
public AllocateFilterCommand(String className, String filterName) {
mClassName = className;
mFilterName = filterName;
}
@Override
public void execute(CommandStack stack) {
Filter filter = null;
try {
filter = stack.getFactory().createFilterByClassName(mClassName,
mFilterName,
stack.getContext());
} catch (IllegalArgumentException e) {
throw new RuntimeException("Error creating filter " + mFilterName + "!", e);
}
stack.getBuilder().addFilter(filter);
}
}
private static class AddSourceSlotCommand implements Command {
private String mName;
private String mSlotName;
public AddSourceSlotCommand(String name, String slotName) {
mName = name;
mSlotName = slotName;
}
@Override
public void execute(CommandStack stack) {
stack.getBuilder().addFrameSlotSource(mName, mSlotName);
}
}
private static class AddTargetSlotCommand implements Command {
private String mName;
private String mSlotName;
public AddTargetSlotCommand(String name, String slotName) {
mName = name;
mSlotName = slotName;
}
@Override
public void execute(CommandStack stack) {
stack.getBuilder().addFrameSlotTarget(mName, mSlotName);
}
}
private static class AddVariableCommand implements Command {
private String mName;
private Object mValue;
public AddVariableCommand(String name, Object value) {
mName = name;
mValue = value;
}
@Override
public void execute(CommandStack stack) {
stack.getBuilder().addVariable(mName, mValue);
}
}
private static class SetFilterInputCommand implements Command {
private String mFilterName;
private String mFilterInput;
private Object mValue;
public SetFilterInputCommand(String filterName, String input, Object value) {
mFilterName = filterName;
mFilterInput = input;
mValue = value;
}
@Override
public void execute(CommandStack stack) {
if (mValue instanceof Variable) {
String varName = ((Variable)mValue).name;
stack.getBuilder().assignVariableToFilterInput(varName, mFilterName, mFilterInput);
} else {
stack.getBuilder().assignValueToFilterInput(mValue, mFilterName, mFilterInput);
}
}
}
private static class ConnectCommand implements Command {
private String mSourceFilter;
private String mSourcePort;
private String mTargetFilter;
private String mTargetPort;
public ConnectCommand(String sourceFilter,
String sourcePort,
String targetFilter,
String targetPort) {
mSourceFilter = sourceFilter;
mSourcePort = sourcePort;
mTargetFilter = targetFilter;
mTargetPort = targetPort;
}
@Override
public void execute(CommandStack stack) {
stack.getBuilder().connect(mSourceFilter, mSourcePort, mTargetFilter, mTargetPort);
}
}
private static class Variable {
public String name;
public Variable(String name) {
this.name = name;
}
}
private static class XmlGraphReader {
private SAXParserFactory mParserFactory;
private static class GraphDataHandler extends DefaultHandler {
private CommandStack mCommandStack;
private boolean mInGraph = false;
private String mCurFilterName = null;
public GraphDataHandler(CommandStack commandStack) {
mCommandStack = commandStack;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attr)
throws SAXException {
if (localName.equals("graph")) {
beginGraph();
} else {
assertInGraph(localName);
if (localName.equals("import")) {
addImportCommand(attr);
} else if (localName.equals("library")) {
addLibraryCommand(attr);
} else if (localName.equals("connect")) {
addConnectCommand(attr);
} else if (localName.equals("var")) {
addVarCommand(attr);
} else if (localName.equals("filter")) {
beginFilter(attr);
} else if (localName.equals("input")) {
addFilterInput(attr);
} else {
throw new SAXException("Unknown XML element '" + localName + "'!");
}
}
}
@Override
public void endElement (String uri, String localName, String qName) {
if (localName.equals("graph")) {
endGraph();
} else if (localName.equals("filter")) {
endFilter();
}
}
private void addImportCommand(Attributes attributes) throws SAXException {
String packageName = getRequiredAttribute(attributes, "package");
mCommandStack.append(new ImportPackageCommand(packageName));
}
private void addLibraryCommand(Attributes attributes) throws SAXException {
String libraryName = getRequiredAttribute(attributes, "name");
mCommandStack.append(new AddLibraryCommand(libraryName));
}
private void addConnectCommand(Attributes attributes) {
String sourcePortName = null;
String sourceFilterName = null;
String targetPortName = null;
String targetFilterName = null;
// check for shorthand: <connect source="filter:port" target="filter:port"/>
String sourceTag = attributes.getValue("source");
if (sourceTag != null) {
String[] sourceParts = sourceTag.split(":");
if (sourceParts.length == 2) {
sourceFilterName = sourceParts[0];
sourcePortName = sourceParts[1];
} else {
throw new RuntimeException(
"'source' tag needs to have format \"filter:port\"! " +
"Alternatively, you may use the form " +
"'sourceFilter=\"filter\" sourcePort=\"port\"'.");
}
} else {
sourceFilterName = attributes.getValue("sourceFilter");
sourcePortName = attributes.getValue("sourcePort");
}
String targetTag = attributes.getValue("target");
if (targetTag != null) {
String[] targetParts = targetTag.split(":");
if (targetParts.length == 2) {
targetFilterName = targetParts[0];
targetPortName = targetParts[1];
} else {
throw new RuntimeException(
"'target' tag needs to have format \"filter:port\"! " +
"Alternatively, you may use the form " +
"'targetFilter=\"filter\" targetPort=\"port\"'.");
}
} else {
targetFilterName = attributes.getValue("targetFilter");
targetPortName = attributes.getValue("targetPort");
}
String sourceSlotName = attributes.getValue("sourceSlot");
String targetSlotName = attributes.getValue("targetSlot");
if (sourceSlotName != null) {
sourceFilterName = "sourceSlot_" + sourceSlotName;
mCommandStack.append(new AddSourceSlotCommand(sourceFilterName,
sourceSlotName));
sourcePortName = "frame";
}
if (targetSlotName != null) {
targetFilterName = "targetSlot_" + targetSlotName;
mCommandStack.append(new AddTargetSlotCommand(targetFilterName,
targetSlotName));
targetPortName = "frame";
}
assertValueNotNull("sourceFilter", sourceFilterName);
assertValueNotNull("sourcePort", sourcePortName);
assertValueNotNull("targetFilter", targetFilterName);
assertValueNotNull("targetPort", targetPortName);
// TODO: Should slot connections auto-branch?
mCommandStack.append(new ConnectCommand(sourceFilterName,
sourcePortName,
targetFilterName,
targetPortName));
}
private void addVarCommand(Attributes attributes) throws SAXException {
String varName = getRequiredAttribute(attributes, "name");
Object varValue = getAssignmentValue(attributes);
mCommandStack.append(new AddVariableCommand(varName, varValue));
}
private void beginGraph() throws SAXException {
if (mInGraph) {
throw new SAXException("Found more than one graph element in XML!");
}
mInGraph = true;
}
private void endGraph() {
mInGraph = false;
}
private void beginFilter(Attributes attributes) throws SAXException {
String className = getRequiredAttribute(attributes, "class");
mCurFilterName = getRequiredAttribute(attributes, "name");
mCommandStack.append(new AllocateFilterCommand(className, mCurFilterName));
}
private void endFilter() {
mCurFilterName = null;
}
private void addFilterInput(Attributes attributes) throws SAXException {
// Make sure we are in a filter element
if (mCurFilterName == null) {
throw new SAXException("Found 'input' element outside of 'filter' "
+ "element!");
}
// Get input name and value
String inputName = getRequiredAttribute(attributes, "name");
Object inputValue = getAssignmentValue(attributes);
if (inputValue == null) {
throw new SAXException("No value specified for input '" + inputName + "' "
+ "of filter '" + mCurFilterName + "'!");
}
// Push command
mCommandStack.append(new SetFilterInputCommand(mCurFilterName,
inputName,
inputValue));
}
private void assertInGraph(String localName) throws SAXException {
if (!mInGraph) {
throw new SAXException("Encountered '" + localName + "' element outside of "
+ "'graph' element!");
}
}
private static Object getAssignmentValue(Attributes attributes) {
String strValue = null;
if ((strValue = attributes.getValue("stringValue")) != null) {
return strValue;
} else if ((strValue = attributes.getValue("booleanValue")) != null) {
return Boolean.parseBoolean(strValue);
} else if ((strValue = attributes.getValue("intValue")) != null) {
return Integer.parseInt(strValue);
} else if ((strValue = attributes.getValue("floatValue")) != null) {
return Float.parseFloat(strValue);
} else if ((strValue = attributes.getValue("floatsValue")) != null) {
String[] floatStrings = TextUtils.split(strValue, ",");
float[] result = new float[floatStrings.length];
for (int i = 0; i < floatStrings.length; ++i) {
result[i] = Float.parseFloat(floatStrings[i]);
}
return result;
} else if ((strValue = attributes.getValue("varValue")) != null) {
return new Variable(strValue);
} else {
return null;
}
}
private static String getRequiredAttribute(Attributes attributes, String name)
throws SAXException {
String result = attributes.getValue(name);
if (result == null) {
throw new SAXException("Required attribute '" + name + "' not found!");
}
return result;
}
private static void assertValueNotNull(String valueName, Object value) {
if (value == null) {
throw new NullPointerException("Required value '" + valueName + "' not specified!");
}
}
}
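// A hedged sketch (not part of the original file) of the XML form these handlers appear to
// accept. Element names are inferred from the command methods above (graph, filter, input,
// connect, var); the filter class names and values are hypothetical:
//
//   <graph>
//     <var name="threshold" intValue="3" />
//     <filter class="com.example.SourceFilter" name="source" />
//     <filter class="com.example.ProcessFilter" name="process">
//       <input name="scale" floatValue="0.5" />
//     </filter>
//     <connect sourceFilter="source" sourcePort="frame"
//              targetFilter="process" targetPort="frame" />
//   </graph>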
public XmlGraphReader() {
mParserFactory = SAXParserFactory.newInstance();
}
public void parseString(String graphString, CommandStack commandStack) throws IOException {
try {
XMLReader reader = getReaderForCommandStack(commandStack);
reader.parse(new InputSource(new StringReader(graphString)));
} catch (SAXException e) {
throw new IOException("XML parse error during graph parsing!", e);
}
}
public void parseInput(InputStream inputStream, CommandStack commandStack)
throws IOException {
try {
XMLReader reader = getReaderForCommandStack(commandStack);
reader.parse(new InputSource(inputStream));
} catch (SAXException e) {
throw new IOException("XML parse error during graph parsing!", e);
}
}
private XMLReader getReaderForCommandStack(CommandStack commandStack) throws IOException {
try {
SAXParser parser = mParserFactory.newSAXParser();
XMLReader reader = parser.getXMLReader();
GraphDataHandler graphHandler = new GraphDataHandler(commandStack);
reader.setContentHandler(graphHandler);
return reader;
} catch (ParserConfigurationException e) {
throw new IOException("Error creating SAXParser for graph parsing!", e);
} catch (SAXException e) {
throw new IOException("Error creating XMLReader for graph parsing!", e);
}
}
}
/**
* Read an XML graph from a String.
*
* This function automatically checks each filter's signature and throws a RuntimeException
* if required ports are unconnected. Use the 3-parameter version to avoid this behavior.
*
* @param context the MffContext into which to load the graph.
* @param xmlSource the graph specified in XML.
* @return the FilterGraph instance for the XML source.
* @throws IOException if there was an error parsing the source.
*/
public static FilterGraph readXmlGraph(MffContext context, String xmlSource)
throws IOException {
FilterGraph.Builder builder = getBuilderForXmlString(context, xmlSource);
return builder.build();
}
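// A minimal usage sketch (not part of the original file); the Context variable 'activity' and
// the XML string are hypothetical, with the XML assumed to follow the format handled by
// XmlGraphReader above:
//
//   MffContext mffContext = new MffContext(activity);
//   String xml = "<graph> ... </graph>";
//   FilterGraph graph = readXmlGraph(mffContext, xml);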
/**
* Read an XML sub-graph from a String.
*
* @param context the MffContext into which to load the graph.
* @param xmlSource the graph specified in XML.
* @param parentGraph the parent graph.
* @return the FilterGraph instance for the XML source.
* @throws IOException if there was an error parsing the source.
*/
public static FilterGraph readXmlSubGraph(
MffContext context, String xmlSource, FilterGraph parentGraph)
throws IOException {
FilterGraph.Builder builder = getBuilderForXmlString(context, xmlSource);
return builder.buildSubGraph(parentGraph);
}
/**
* Read an XML graph from a resource.
*
* This function automatically checks each filter's signature and throws a RuntimeException
* if required ports are unconnected. Use the 3-parameter version to avoid this behavior.
*
* @param context the MffContext into which to load the graph.
* @param resourceId the XML resource ID.
* @return the FilterGraph instance for the XML source.
* @throws IOException if there was an error reading or parsing the resource.
*/
public static FilterGraph readXmlGraphResource(MffContext context, int resourceId)
throws IOException {
FilterGraph.Builder builder = getBuilderForXmlResource(context, resourceId);
return builder.build();
}
/**
* Read an XML sub-graph from a resource.
*
* @param context the MffContext into which to load the graph.
* @param resourceId the XML resource ID.
* @param parentGraph the parent graph.
* @return the FilterGraph instance for the XML source.
* @throws IOException if there was an error reading or parsing the resource.
*/
public static FilterGraph readXmlSubGraphResource(
MffContext context, int resourceId, FilterGraph parentGraph)
throws IOException {
FilterGraph.Builder builder = getBuilderForXmlResource(context, resourceId);
return builder.buildSubGraph(parentGraph);
}
private static FilterGraph.Builder getBuilderForXmlString(MffContext context, String source)
throws IOException {
XmlGraphReader reader = new XmlGraphReader();
CommandStack commands = new CommandStack(context);
reader.parseString(source, commands);
commands.execute();
return commands.getBuilder();
}
private static FilterGraph.Builder getBuilderForXmlResource(MffContext context, int resourceId)
throws IOException {
InputStream inputStream = context.getApplicationContext().getResources()
.openRawResource(resourceId);
XmlGraphReader reader = new XmlGraphReader();
CommandStack commands = new CommandStack(context);
reader.parseInput(inputStream, commands);
commands.execute();
return commands.getBuilder();
}
}

View File

@ -0,0 +1,793 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import android.graphics.RectF;
import android.opengl.GLES20;
import android.util.Log;
import androidx.media.filterfw.geometry.Quad;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.Arrays;
import java.util.HashMap;
/**
* Convenience class to perform GL shader operations on image data.
* <p>
* The ImageShader class greatly simplifies the task of running GL shader language kernels over
* Frame data buffers that contain RGBA image data.
* </p><p>
* TODO: More documentation
* </p>
*/
public class ImageShader {
private int mProgram = 0;
private boolean mClearsOutput = false;
private float[] mClearColor = { 0f, 0f, 0f, 0f };
private boolean mBlendEnabled = false;
private int mSFactor = GLES20.GL_SRC_ALPHA;
private int mDFactor = GLES20.GL_ONE_MINUS_SRC_ALPHA;
private int mDrawMode = GLES20.GL_TRIANGLE_STRIP;
private int mVertexCount = 4;
private int mBaseTexUnit = GLES20.GL_TEXTURE0;
private int mClearBuffers = GLES20.GL_COLOR_BUFFER_BIT;
private float[] mSourceCoords = new float[] { 0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f };
private float[] mTargetCoords = new float[] { -1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f };
private HashMap<String, ProgramUniform> mUniforms;
private HashMap<String, VertexAttribute> mAttributes = new HashMap<String, VertexAttribute>();
private final static int FLOAT_SIZE = 4;
private final static String mDefaultVertexShader =
"attribute vec4 a_position;\n" +
"attribute vec2 a_texcoord;\n" +
"varying vec2 v_texcoord;\n" +
"void main() {\n" +
" gl_Position = a_position;\n" +
" v_texcoord = a_texcoord;\n" +
"}\n";
private final static String mIdentityShader =
"precision mediump float;\n" +
"uniform sampler2D tex_sampler_0;\n" +
"varying vec2 v_texcoord;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(tex_sampler_0, v_texcoord);\n" +
"}\n";
private static class VertexAttribute {
private String mName;
private boolean mIsConst;
private int mIndex;
private boolean mShouldNormalize;
private int mOffset;
private int mStride;
private int mComponents;
private int mType;
private int mVbo;
private int mLength;
private FloatBuffer mValues;
public VertexAttribute(String name, int index) {
mName = name;
mIndex = index;
mLength = -1;
}
public void set(boolean normalize, int stride, int components, int type, float[] values) {
mIsConst = false;
mShouldNormalize = normalize;
mStride = stride;
mComponents = components;
mType = type;
mVbo = 0;
if (mLength != values.length){
initBuffer(values);
mLength = values.length;
}
copyValues(values);
}
public void set(boolean normalize, int offset, int stride, int components, int type,
int vbo){
mIsConst = false;
mShouldNormalize = normalize;
mOffset = offset;
mStride = stride;
mComponents = components;
mType = type;
mVbo = vbo;
mValues = null;
}
public boolean push() {
if (mIsConst) {
switch (mComponents) {
case 1:
GLES20.glVertexAttrib1fv(mIndex, mValues);
break;
case 2:
GLES20.glVertexAttrib2fv(mIndex, mValues);
break;
case 3:
GLES20.glVertexAttrib3fv(mIndex, mValues);
break;
case 4:
GLES20.glVertexAttrib4fv(mIndex, mValues);
break;
default:
return false;
}
GLES20.glDisableVertexAttribArray(mIndex);
} else {
if (mValues != null) {
// Note that we cannot do any size checking here, as the correct component
// count depends on the drawing step. GL should catch such errors then, and
// we will report them to the user.
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
GLES20.glVertexAttribPointer(mIndex,
mComponents,
mType,
mShouldNormalize,
mStride,
mValues);
} else {
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVbo);
GLES20.glVertexAttribPointer(mIndex,
mComponents,
mType,
mShouldNormalize,
mStride,
mOffset);
}
GLES20.glEnableVertexAttribArray(mIndex);
}
GLToolbox.checkGlError("Set vertex-attribute values");
return true;
}
@Override
public String toString() {
return mName;
}
private void initBuffer(float[] values) {
mValues = ByteBuffer.allocateDirect(values.length * FLOAT_SIZE)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
}
private void copyValues(float[] values) {
mValues.put(values).position(0);
}
}
private static final class ProgramUniform {
private String mName;
private int mLocation;
private int mType;
private int mSize;
public ProgramUniform(int program, int index) {
int[] len = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_ACTIVE_UNIFORM_MAX_LENGTH, len, 0);
int[] type = new int[1];
int[] size = new int[1];
byte[] name = new byte[len[0]];
int[] ignore = new int[1];
GLES20.glGetActiveUniform(program, index, len[0], ignore, 0, size, 0, type, 0, name, 0);
mName = new String(name, 0, strlen(name));
mLocation = GLES20.glGetUniformLocation(program, mName);
mType = type[0];
mSize = size[0];
GLToolbox.checkGlError("Initializing uniform");
}
public String getName() {
return mName;
}
public int getType() {
return mType;
}
public int getLocation() {
return mLocation;
}
public int getSize() {
return mSize;
}
}
public ImageShader(String fragmentShader) {
mProgram = createProgram(mDefaultVertexShader, fragmentShader);
scanUniforms();
}
public ImageShader(String vertexShader, String fragmentShader) {
mProgram = createProgram(vertexShader, fragmentShader);
scanUniforms();
}
public static ImageShader createIdentity() {
return new ImageShader(mIdentityShader);
}
public static ImageShader createIdentity(String vertexShader) {
return new ImageShader(vertexShader, mIdentityShader);
}
public static void renderTextureToTarget(TextureSource texture,
RenderTarget target,
int width,
int height) {
ImageShader shader = RenderTarget.currentTarget().getIdentityShader();
shader.process(texture, target, width, height);
}
public void process(FrameImage2D input, FrameImage2D output) {
TextureSource texSource = input.lockTextureSource();
RenderTarget renderTarget = output.lockRenderTarget();
processMulti(new TextureSource[] { texSource },
renderTarget,
output.getWidth(),
output.getHeight());
input.unlock();
output.unlock();
}
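// A minimal usage sketch (not part of the original file): running a custom fragment shader over
// an image frame, e.g. from a filter's onProcess(). The shader source and the frame variables
// (inputFrame, outputFrame) are hypothetical; tex_sampler_0 and v_texcoord follow the naming
// conventions used by this class:
//
//   String kernel =
//       "precision mediump float;\n" +
//       "uniform sampler2D tex_sampler_0;\n" +
//       "varying vec2 v_texcoord;\n" +
//       "void main() {\n" +
//       "  vec4 color = texture2D(tex_sampler_0, v_texcoord);\n" +
//       "  gl_FragColor = vec4(color.rgb * 0.5, color.a);\n" +
//       "}\n";
//   ImageShader shader = new ImageShader(kernel);
//   shader.process(inputFrame, outputFrame);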
public void processMulti(FrameImage2D[] inputs, FrameImage2D output) {
TextureSource[] texSources = new TextureSource[inputs.length];
for (int i = 0; i < inputs.length; ++i) {
texSources[i] = inputs[i].lockTextureSource();
}
RenderTarget renderTarget = output.lockRenderTarget();
processMulti(texSources,
renderTarget,
output.getWidth(),
output.getHeight());
for (FrameImage2D input : inputs) {
input.unlock();
}
output.unlock();
}
public void process(TextureSource texture, RenderTarget target, int width, int height) {
processMulti(new TextureSource[] { texture }, target, width, height);
}
public void processMulti(TextureSource[] sources, RenderTarget target, int width, int height) {
GLToolbox.checkGlError("Unknown Operation");
checkExecutable();
checkTexCount(sources.length);
focusTarget(target, width, height);
pushShaderState();
bindInputTextures(sources);
render();
}
public void processNoInput(FrameImage2D output) {
RenderTarget renderTarget = output.lockRenderTarget();
processNoInput(renderTarget, output.getWidth(), output.getHeight());
output.unlock();
}
public void processNoInput(RenderTarget target, int width, int height) {
processMulti(new TextureSource[] {}, target, width, height);
}
public int getUniformLocation(String name) {
return getProgramUniform(name, true).getLocation();
}
public int getAttributeLocation(String name) {
if (name.equals(positionAttributeName()) || name.equals(texCoordAttributeName())) {
Log.w("ImageShader", "Attempting to access internal attribute '" + name
+ "' directly!");
}
int loc = GLES20.glGetAttribLocation(mProgram, name);
if (loc < 0) {
throw new RuntimeException("Unknown attribute '" + name + "' in shader program!");
}
return loc;
}
public void setUniformValue(String uniformName, int value) {
useProgram();
int uniformHandle = getUniformLocation(uniformName);
GLES20.glUniform1i(uniformHandle, value);
GLToolbox.checkGlError("Set uniform value (" + uniformName + ")");
}
public void setUniformValue(String uniformName, float value) {
useProgram();
int uniformHandle = getUniformLocation(uniformName);
GLES20.glUniform1f(uniformHandle, value);
GLToolbox.checkGlError("Set uniform value (" + uniformName + ")");
}
public void setUniformValue(String uniformName, int[] values) {
ProgramUniform uniform = getProgramUniform(uniformName, true);
useProgram();
int len = values.length;
switch (uniform.getType()) {
case GLES20.GL_INT:
checkUniformAssignment(uniform, len, 1);
GLES20.glUniform1iv(uniform.getLocation(), len, values, 0);
break;
case GLES20.GL_INT_VEC2:
checkUniformAssignment(uniform, len, 2);
GLES20.glUniform2iv(uniform.getLocation(), len / 2, values, 0);
break;
case GLES20.GL_INT_VEC3:
checkUniformAssignment(uniform, len, 3);
GLES20.glUniform3iv(uniform.getLocation(), len / 3, values, 0);
break;
case GLES20.GL_INT_VEC4:
checkUniformAssignment(uniform, len, 4);
GLES20.glUniform4iv(uniform.getLocation(), len / 4, values, 0);
break;
default:
throw new RuntimeException("Cannot assign int-array to incompatible uniform type "
+ "for uniform '" + uniformName + "'!");
}
GLToolbox.checkGlError("Set uniform value (" + uniformName + ")");
}
public void setUniformValue(String uniformName, float[] values) {
ProgramUniform uniform = getProgramUniform(uniformName, true);
useProgram();
int len = values.length;
switch (uniform.getType()) {
case GLES20.GL_FLOAT:
checkUniformAssignment(uniform, len, 1);
GLES20.glUniform1fv(uniform.getLocation(), len, values, 0);
break;
case GLES20.GL_FLOAT_VEC2:
checkUniformAssignment(uniform, len, 2);
GLES20.glUniform2fv(uniform.getLocation(), len / 2, values, 0);
break;
case GLES20.GL_FLOAT_VEC3:
checkUniformAssignment(uniform, len, 3);
GLES20.glUniform3fv(uniform.getLocation(), len / 3, values, 0);
break;
case GLES20.GL_FLOAT_VEC4:
checkUniformAssignment(uniform, len, 4);
GLES20.glUniform4fv(uniform.getLocation(), len / 4, values, 0);
break;
case GLES20.GL_FLOAT_MAT2:
checkUniformAssignment(uniform, len, 4);
GLES20.glUniformMatrix2fv(uniform.getLocation(), len / 4, false, values, 0);
break;
case GLES20.GL_FLOAT_MAT3:
checkUniformAssignment(uniform, len, 9);
GLES20.glUniformMatrix3fv(uniform.getLocation(), len / 9, false, values, 0);
break;
case GLES20.GL_FLOAT_MAT4:
checkUniformAssignment(uniform, len, 16);
GLES20.glUniformMatrix4fv(uniform.getLocation(), len / 16, false, values, 0);
break;
default:
throw new RuntimeException("Cannot assign float-array to incompatible uniform type "
+ "for uniform '" + uniformName + "'!");
}
GLToolbox.checkGlError("Set uniform value (" + uniformName + ")");
}
public void setAttributeValues(String attributeName, float[] data, int components) {
VertexAttribute attr = getProgramAttribute(attributeName, true);
attr.set(false, FLOAT_SIZE * components, components, GLES20.GL_FLOAT, data);
}
public void setAttributeValues(String attributeName, int vbo, int type, int components,
int stride, int offset, boolean normalize) {
VertexAttribute attr = getProgramAttribute(attributeName, true);
attr.set(normalize, offset, stride, components, type, vbo);
}
public void setSourceRect(float x, float y, float width, float height) {
setSourceCoords(new float[] { x, y, x + width, y, x, y + height, x + width, y + height });
}
public void setSourceRect(RectF rect) {
setSourceRect(rect.left, rect.top, rect.right - rect.left, rect.bottom - rect.top);
}
public void setSourceQuad(Quad quad) {
setSourceCoords(new float[] { quad.topLeft().x, quad.topLeft().y,
quad.topRight().x, quad.topRight().y,
quad.bottomLeft().x, quad.bottomLeft().y,
quad.bottomRight().x, quad.bottomRight().y });
}
public void setSourceCoords(float[] coords) {
if (coords.length != 8) {
throw new IllegalArgumentException("Expected 8 coordinates as source coordinates but "
+ "got " + coords.length + " coordinates!");
}
mSourceCoords = Arrays.copyOf(coords, 8);
}
public void setSourceTransform(float[] matrix) {
if (matrix.length != 16) {
throw new IllegalArgumentException("Expected 4x4 matrix for source transform!");
}
setSourceCoords(new float[] {
matrix[12],
matrix[13],
matrix[0] + matrix[12],
matrix[1] + matrix[13],
matrix[4] + matrix[12],
matrix[5] + matrix[13],
matrix[0] + matrix[4] + matrix[12],
matrix[1] + matrix[5] + matrix[13],
});
}
public void setTargetRect(float x, float y, float width, float height) {
setTargetCoords(new float[] { x, y,
x + width, y,
x, y + height,
x + width, y + height });
}
public void setTargetRect(RectF rect) {
setTargetCoords(new float[] { rect.left, rect.top,
rect.right, rect.top,
rect.left, rect.bottom,
rect.right, rect.bottom });
}
public void setTargetQuad(Quad quad) {
setTargetCoords(new float[] { quad.topLeft().x, quad.topLeft().y,
quad.topRight().x, quad.topRight().y,
quad.bottomLeft().x, quad.bottomLeft().y,
quad.bottomRight().x, quad.bottomRight().y });
}
public void setTargetCoords(float[] coords) {
if (coords.length != 8) {
throw new IllegalArgumentException("Expected 8 coordinates as target coordinates but "
+ "got " + coords.length + " coordinates!");
}
mTargetCoords = new float[8];
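// Map unit-rectangle coordinates in [0, 1] to GL normalized device coordinates in [-1, 1].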
for (int i = 0; i < 8; ++i) {
mTargetCoords[i] = coords[i] * 2f - 1f;
}
}
public void setTargetTransform(float[] matrix) {
if (matrix.length != 16) {
throw new IllegalArgumentException("Expected 4x4 matrix for target transform!");
}
setTargetCoords(new float[] {
matrix[12],
matrix[13],
matrix[0] + matrix[12],
matrix[1] + matrix[13],
matrix[4] + matrix[12],
matrix[5] + matrix[13],
matrix[0] + matrix[4] + matrix[12],
matrix[1] + matrix[5] + matrix[13],
});
}
public void setClearsOutput(boolean clears) {
mClearsOutput = clears;
}
public boolean getClearsOutput() {
return mClearsOutput;
}
public void setClearColor(float[] rgba) {
mClearColor = rgba;
}
public float[] getClearColor() {
return mClearColor;
}
public void setClearBufferMask(int bufferMask) {
mClearBuffers = bufferMask;
}
public int getClearBufferMask() {
return mClearBuffers;
}
public void setBlendEnabled(boolean enable) {
mBlendEnabled = enable;
}
public boolean getBlendEnabled() {
return mBlendEnabled;
}
public void setBlendFunc(int sFactor, int dFactor) {
mSFactor = sFactor;
mDFactor = dFactor;
}
public void setDrawMode(int drawMode) {
mDrawMode = drawMode;
}
public int getDrawMode() {
return mDrawMode;
}
public void setVertexCount(int count) {
mVertexCount = count;
}
public int getVertexCount() {
return mVertexCount;
}
public void setBaseTextureUnit(int baseTexUnit) {
mBaseTexUnit = baseTexUnit;
}
public int baseTextureUnit() {
return mBaseTexUnit;
}
public String texCoordAttributeName() {
return "a_texcoord";
}
public String positionAttributeName() {
return "a_position";
}
public String inputTextureUniformName(int index) {
return "tex_sampler_" + index;
}
public static int maxTextureUnits() {
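// Note: this returns the GLES20 enum constant itself rather than a limit queried from the
// driver (e.g. via glGetIntegerv), so checkTexCount() below is only a loose upper bound.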
return GLES20.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS;
}
@Override
protected void finalize() throws Throwable {
GLES20.glDeleteProgram(mProgram);
}
protected void pushShaderState() {
useProgram();
updateSourceCoordAttribute();
updateTargetCoordAttribute();
pushAttributes();
if (mClearsOutput) {
GLES20.glClearColor(mClearColor[0], mClearColor[1], mClearColor[2], mClearColor[3]);
GLES20.glClear(mClearBuffers);
}
if (mBlendEnabled) {
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(mSFactor, mDFactor);
} else {
GLES20.glDisable(GLES20.GL_BLEND);
}
GLToolbox.checkGlError("Set render variables");
}
private void focusTarget(RenderTarget target, int width, int height) {
target.focus();
GLES20.glViewport(0, 0, width, height);
GLToolbox.checkGlError("glViewport");
}
private void bindInputTextures(TextureSource[] sources) {
for (int i = 0; i < sources.length; ++i) {
// Activate texture unit i
GLES20.glActiveTexture(baseTextureUnit() + i);
// Bind texture
sources[i].bind();
// Assign the texture uniform in the shader to unit i
int texUniform = GLES20.glGetUniformLocation(mProgram, inputTextureUniformName(i));
if (texUniform >= 0) {
GLES20.glUniform1i(texUniform, i);
} else {
throw new RuntimeException("Shader does not seem to support " + sources.length
+ " number of input textures! Missing uniform " + inputTextureUniformName(i)
+ "!");
}
GLToolbox.checkGlError("Binding input texture " + i);
}
}
private void pushAttributes() {
for (VertexAttribute attr : mAttributes.values()) {
if (!attr.push()) {
throw new RuntimeException("Unable to assign attribute value '" + attr + "'!");
}
}
GLToolbox.checkGlError("Push Attributes");
}
private void updateSourceCoordAttribute() {
// If attribute does not exist, simply do nothing (may be custom shader).
VertexAttribute attr = getProgramAttribute(texCoordAttributeName(), false);
// A non-null value of mSourceCoords indicates new values to be set.
if (mSourceCoords != null && attr != null) {
// Upload new source coordinates to GPU
attr.set(false, FLOAT_SIZE * 2, 2, GLES20.GL_FLOAT, mSourceCoords);
}
// Do not set again (even if failed, to not cause endless attempts)
mSourceCoords = null;
}
private void updateTargetCoordAttribute() {
// If attribute does not exist, simply do nothing (may be custom shader).
VertexAttribute attr = getProgramAttribute(positionAttributeName(), false);
// A non-null value of mTargetCoords indicates new values to be set.
if (mTargetCoords != null && attr != null) {
// Upload new target coordinates to GPU
attr.set(false, FLOAT_SIZE * 2, 2, GLES20.GL_FLOAT, mTargetCoords);
}
// Do not set again (even if failed, to not cause endless attempts)
mTargetCoords = null;
}
private void render() {
GLES20.glDrawArrays(mDrawMode, 0, mVertexCount);
GLToolbox.checkGlError("glDrawArrays");
}
private void checkExecutable() {
if (mProgram == 0) {
throw new RuntimeException("Attempting to execute invalid shader-program!");
}
}
private void useProgram() {
GLES20.glUseProgram(mProgram);
GLToolbox.checkGlError("glUseProgram");
}
private static void checkTexCount(int count) {
if (count > maxTextureUnits()) {
throw new RuntimeException("Number of textures passed (" + count + ") exceeds the "
+ "maximum number of allowed texture units (" + maxTextureUnits() + ")!");
}
}
private static int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
String info = GLES20.glGetShaderInfoLog(shader);
GLES20.glDeleteShader(shader);
shader = 0;
throw new RuntimeException("Could not compile shader " + shaderType + ":" + info);
}
}
return shader;
}
private static int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
throw new RuntimeException("Could not create shader-program as vertex shader "
+ "could not be compiled!");
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
throw new RuntimeException("Could not create shader-program as fragment shader "
+ "could not be compiled!");
}
int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
GLToolbox.checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
GLToolbox.checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
String info = GLES20.glGetProgramInfoLog(program);
GLES20.glDeleteProgram(program);
program = 0;
throw new RuntimeException("Could not link program: " + info);
}
}
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(pixelShader);
return program;
}
private void scanUniforms() {
int[] uniformCount = new int[1];
GLES20.glGetProgramiv(mProgram, GLES20.GL_ACTIVE_UNIFORMS, uniformCount, 0);
if (uniformCount[0] > 0) {
mUniforms = new HashMap<String, ProgramUniform>(uniformCount[0]);
for (int i = 0; i < uniformCount[0]; ++i) {
ProgramUniform uniform = new ProgramUniform(mProgram, i);
mUniforms.put(uniform.getName(), uniform);
}
}
}
private ProgramUniform getProgramUniform(String name, boolean required) {
ProgramUniform result = mUniforms.get(name);
if (result == null && required) {
throw new IllegalArgumentException("Unknown uniform '" + name + "'!");
}
return result;
}
private VertexAttribute getProgramAttribute(String name, boolean required) {
VertexAttribute result = mAttributes.get(name);
if (result == null) {
int handle = GLES20.glGetAttribLocation(mProgram, name);
if (handle >= 0) {
result = new VertexAttribute(name, handle);
mAttributes.put(name, result);
} else if (required) {
throw new IllegalArgumentException("Unknown attribute '" + name + "'!");
}
}
return result;
}
private void checkUniformAssignment(ProgramUniform uniform, int values, int components) {
if (values % components != 0) {
throw new RuntimeException("Size mismatch: Attempting to assign values of size "
+ values + " to uniform '" + uniform.getName() + "' (must be multiple of "
+ components + ")!");
} else if (uniform.getSize() != values / components) {
throw new RuntimeException("Size mismatch: Cannot assign " + values + " values to "
+ "uniform '" + uniform.getName() + "'!");
}
}
private static int strlen(byte[] strVal) {
for (int i = 0; i < strVal.length; ++i) {
if (strVal[i] == '\0') {
return i;
}
}
return strVal.length;
}
}

View File

@ -0,0 +1,327 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import java.lang.reflect.Field;
/**
* Input ports are the receiving ports of frames in a filter.
* <p>
* InputPort instances receive Frame data from connected OutputPort instances of a previous filter.
* Frames flow from output ports to input ports. Filters can process frame data by calling
* {@link #pullFrame()} on an input port. If the input port is set to wait for an input frame
* (see {@link #setWaitsForFrame(boolean)}), there is guaranteed to be a Frame on the port before
* {@code onProcess()} is called. This is the default setting. Otherwise, calling
* {@link #pullFrame()} may return a value of {@code null}.
* </p><p>
* InputPorts may be bound to fields of the Filter. When an input port is bound to a field, Frame
* values will be assigned to the field once a Frame is received on that port. The Frame value must
* be of a type that is compatible with the field type.
* </p>
*/
public final class InputPort {
private Filter mFilter;
private String mName;
private Signature.PortInfo mInfo;
private FrameListener mListener = null;
private FrameQueue.Builder mQueueBuilder = null;
private FrameQueue mQueue = null;
private boolean mWaitForFrame = true;
private boolean mAutoPullEnabled = false;
public interface FrameListener {
public void onFrameReceived(InputPort port, Frame frame);
}
private class FieldBinding implements FrameListener {
private Field mField;
public FieldBinding(Field field) {
mField = field;
}
@Override
public void onFrameReceived(InputPort port, Frame frame) {
try {
if(port.mInfo.type.getNumberOfDimensions() > 0) {
FrameValues frameValues = frame.asFrameValues();
mField.set(mFilter, frameValues.getValues());
} else {
FrameValue frameValue = frame.asFrameValue();
mField.set(mFilter, frameValue.getValue());
}
} catch (Exception e) {
throw new RuntimeException("Assigning frame " + frame + " to field "
+ mField + " of filter " + mFilter + " caused exception!", e);
}
}
}
/**
* Attach this input port to an output port for frame passing.
*
* Use this method whenever you plan on passing a Frame through from an input port to an
* output port. This must be called from inside
* {@link Filter#onInputPortAttached(InputPort) onInputPortAttached}.
*
* @param outputPort the output port that Frames will be pushed to.
*/
public void attachToOutputPort(OutputPort outputPort) {
assertInAttachmentStage();
mFilter.openOutputPort(outputPort);
mQueueBuilder.attachQueue(outputPort.getQueue());
}
/**
* Bind this input port to the specified listener.
*
* Use this when you wish to be notified of incoming frames. The listener method
* {@link FrameListener#onFrameReceived(InputPort, Frame)} will be called once a Frame is pulled
* on this port. Typically this is called from inside
* {@link Filter#onInputPortAttached(InputPort) onInputPortAttached}, and used in
* conjunction with {@link #setAutoPullEnabled(boolean)}. Overrides any previous bindings.
*
* @param listener the listener to handle incoming Frames.
*/
public void bindToListener(FrameListener listener) {
assertInAttachmentStage();
mListener = listener;
}
/**
* Bind this input port to the specified field.
*
* Use this when you wish to pull frames directly into a field of the filter. This requires
* that the input frames can be interpreted as object-based frames of the field's class.
* Overrides any previous bindings.
*
* This is typically called from inside
* {@link Filter#onInputPortAttached(InputPort) onInputPortAttached}, and used in
* conjunction with {@link #setAutoPullEnabled(boolean)}.
*
* @param field the field to pull frame data into.
* @see #bindToFieldNamed(String)
* @see #setAutoPullEnabled(boolean)
*/
public void bindToField(Field field) {
assertInAttachmentStage();
mListener = new FieldBinding(field);
}
/**
* Bind this input port to the field with the specified name.
*
* Use this when you wish to pull frames directly into a field of the filter. This requires
* that the input frames can be interpreted as object-based frames of the field's class.
* Overrides any previous bindings.
*
* This is typically called from inside
* {@link Filter#onInputPortAttached(InputPort) onInputPortAttached}, and used in
* conjunction with {@link #setAutoPullEnabled(boolean)}.
*
* @param fieldName the field to pull frame data into.
* @see #bindToField(Field)
* @see #setAutoPullEnabled(boolean)
*/
public void bindToFieldNamed(String fieldName) {
Field field = findFieldNamed(fieldName, mFilter.getClass());
if (field == null) {
throw new IllegalArgumentException("Attempting to bind to unknown field '"
+ fieldName + "'!");
}
bindToField(field);
}
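// A minimal usage sketch (not part of the original file): binding an optional input port to a
// filter field from a filter's onInputPortOpen(). The port name "scale" and the field mScale
// are hypothetical:
//
//   @Override
//   public void onInputPortOpen(InputPort port) {
//       if (port.getName().equals("scale")) {
//           port.bindToFieldNamed("mScale");
//           port.setAutoPullEnabled(true);
//       }
//   }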
/**
* Set whether the InputPort automatically pulls frames.
* This is typically only used when the port is bound to another target.
* @param enabled true, if frames should be automatically pulled on this port.
*/
public void setAutoPullEnabled(boolean enabled) {
mAutoPullEnabled = enabled;
}
/**
* Returns whether the InputPort automatically pulls frames.
* @return true, if frames are automatically pulled on this port.
*/
public boolean isAutoPullEnabled() {
return mAutoPullEnabled;
}
/**
* Pull a waiting frame from the port.
*
* Call this to pull a frame from the input port for processing. If no frame is waiting on the
* input port, returns null. After this call the port will have no Frame waiting (empty port).
* Note, that this returns a frame owned by the input queue. You must detach the frame if you
* wish to hold on to it.
*
* @return Frame instance, or null if no frame is available for pulling.
*/
public synchronized Frame pullFrame() {
if (mQueue == null) {
throw new IllegalStateException("Cannot pull frame from closed input port!");
}
Frame frame = mQueue.pullFrame();
if (frame != null) {
if (mListener != null) {
mListener.onFrameReceived(this, frame);
}
//Log.i("InputPort", "Adding frame " + frame + " to auto-release pool");
mFilter.addAutoReleaseFrame(frame);
long timestamp = frame.getTimestamp();
if (timestamp != Frame.TIMESTAMP_NOT_SET) {
mFilter.onPulledFrameWithTimestamp(frame.getTimestamp());
}
}
return frame;
}
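// A minimal usage sketch (not part of the original file), e.g. from a filter's onProcess();
// the port name "image" is hypothetical:
//
//   Frame frame = getConnectedInputPort("image").pullFrame();
//   if (frame != null) {
//       FrameImage2D image = frame.asFrameImage2D();
//       // ... process the frame; detach it if it must outlive this call.
//   }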
public synchronized Frame peek() {
if (mQueue == null) {
throw new IllegalStateException("Cannot peek frame on a closed input port!");
}
return mQueue.peek();
}
/**
* Returns true, if the port is connected.
* @return true, if there is an output port that connects to this port.
*/
public boolean isConnected() {
return mQueue != null;
}
/**
* Returns true, if there is a frame waiting on this port.
* @return true, if there is a frame waiting on this port.
*/
public synchronized boolean hasFrame() {
return mQueue != null && mQueue.canPull();
}
/**
* Sets whether to wait for a frame on this port before processing.
* When set to true, the Filter will not be scheduled for processing unless there is a Frame
* waiting on this port. The default value is true.
*
* @param wait true, if the Filter should wait for a Frame before processing.
* @see #waitsForFrame()
*/
public void setWaitsForFrame(boolean wait) {
mWaitForFrame = wait;
}
/**
* Returns whether the filter waits for a frame on this port before processing.
* @return true, if the filter waits for a frame on this port before processing.
* @see #setWaitsForFrame(boolean)
*/
public boolean waitsForFrame() {
return mWaitForFrame;
}
/**
* Returns the input port's name.
* This is the name that was specified when the input port was connected.
*
* @return the input port's name.
*/
public String getName() {
return mName;
}
/**
* Returns the FrameType of this port.
* This is the type that was specified when the input port was declared.
*
* @return the input port's FrameType.
*/
public FrameType getType() {
return getQueue().getType();
}
/**
* Return the filter object that this port belongs to.
*
* @return the input port's filter.
*/
public Filter getFilter() {
return mFilter;
}
@Override
public String toString() {
return mFilter.getName() + ":" + mName;
}
// Internal only ///////////////////////////////////////////////////////////////////////////////
InputPort(Filter filter, String name, Signature.PortInfo info) {
mFilter = filter;
mName = name;
mInfo = info;
}
boolean conditionsMet() {
return !mWaitForFrame || hasFrame();
}
void onOpen(FrameQueue.Builder builder) {
mQueueBuilder = builder;
mQueueBuilder.setReadType(mInfo.type);
mFilter.onInputPortOpen(this);
}
void setQueue(FrameQueue queue) {
mQueue = queue;
mQueueBuilder = null;
}
FrameQueue getQueue() {
return mQueue;
}
void clear() {
if (mQueue != null) {
mQueue.clear();
}
}
private void assertInAttachmentStage() {
if (mQueueBuilder == null) {
throw new IllegalStateException("Attempting to attach port while not in attachment "
+ "stage!");
}
}
private Field findFieldNamed(String fieldName, Class<?> clazz) {
Field field = null;
try {
field = clazz.getDeclaredField(fieldName);
field.setAccessible(true);
} catch (NoSuchFieldException e) {
Class<?> superClass = clazz.getSuperclass();
if (superClass != null) {
field = findFieldNamed(fieldName, superClass);
}
}
return field;
}
}

View File

@ -0,0 +1,470 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ConfigurationInfo;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.renderscript.RenderScript;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import java.util.HashSet;
import java.util.Set;
/**
* The MffContext holds the state and resources of a Mobile Filter Framework processing instance.
* Though it is possible to create multiple MffContext instances, typical applications will rely on
* a single MffContext to perform all processing within the Mobile Filter Framework.
*
* The MffContext class declares two methods, {@link #onPause()} and {@link #onResume()}, that are
* typically called when the application's activity is paused and resumed. These take care of
* halting any processing in the context and releasing resources while the activity is paused.
*/
public class MffContext {
/**
* Class to hold configuration information for MffContexts.
*/
public static class Config {
/**
* Set to true, if this context will make use of the camera.
* If your application does not require the camera, the context does not guarantee that
* a camera is available for streaming. That is, you may only use a CameraStreamer if
* the context's {@link #isCameraStreamingSupported()} returns true.
*/
public boolean requireCamera = true;
/**
* Set to true, if this context requires OpenGL.
* If your application does not require OpenGL, the context does not guarantee that OpenGL
* is available. That is, you may only use OpenGL (within filters running in this context)
* if the context's {@link #isOpenGLSupported()} method returns true.
*/
public boolean requireOpenGL = true;
/**
* On older Android versions the Camera may need a SurfaceView to render into in order to
* function. You may specify a dummy SurfaceView here if you do not want the context to
* create its own view. Note, that your view may or may not be used. You cannot rely on
* your dummy view to be used by the Camera. If you pass null, no dummy view will be used.
* In this case your application may not run correctly on older devices if you use the
* camera. This flag has no effect if you do not require the camera.
*/
public SurfaceView dummySurface = null;
/** Force MFF to not use OpenGL in its processing. */
public boolean forceNoGL = false;
}
static private class State {
public static final int STATE_RUNNING = 1;
public static final int STATE_PAUSED = 2;
public static final int STATE_DESTROYED = 3;
public int current = STATE_RUNNING;
}
/** The application context. */
private Context mApplicationContext = null;
/** The set of filter graphs within this context */
private Set<FilterGraph> mGraphs = new HashSet<FilterGraph>();
/** The set of graph runners within this context */
private Set<GraphRunner> mRunners = new HashSet<GraphRunner>();
/** True, if the context preserves frames when paused. */
private boolean mPreserveFramesOnPause = false;
/** The shared CameraStreamer that streams camera frames to CameraSource filters. */
private CameraStreamer mCameraStreamer = null;
/** The current context state. */
private State mState = new State();
/** A dummy SurfaceView that is required for Camera operation on older devices. */
private SurfaceView mDummySurfaceView = null;
/** Handler to execute code in the context's thread, such as issuing callbacks. */
private Handler mHandler = null;
/** Flag whether OpenGL ES 2 is supported in this context. */
private boolean mGLSupport;
/** Flag whether camera streaming is supported in this context. */
private boolean mCameraStreamingSupport;
/** RenderScript base master class. */
private RenderScript mRenderScript;
/**
* Creates a new MffContext with the default configuration.
*
* An MffContext must be attached to a Context object of an application. You may create
* multiple MffContexts, however data between them cannot be shared. The context must be
* created in a thread with a Looper (such as the main/UI thread).
*
* On older versions of Android, the MffContext may create a visible dummy view for the
* camera to render into. This is a 1x1 SurfaceView that is placed into the top-left corner.
*
* @param context The application context to attach the MffContext to.
*/
public MffContext(Context context) {
init(context, new Config());
}
/**
* Creates a new MffContext with the specified configuration.
*
* An MffContext must be attached to a Context object of an application. You may create
* multiple MffContexts, however data between them cannot be shared. The context must be
* created in a thread with a Looper (such as the main/UI thread).
*
* On older versions of Android, the MffContext may create a visible dummy view for the
* camera to render into. This is a 1x1 SurfaceView that is placed into the top-left corner.
* You may alternatively specify your own SurfaceView in the configuration.
*
* @param context The application context to attach the MffContext to.
* @param config The configuration to use.
*
* @throws RuntimeException If no context for the requested configuration could be created.
*/
public MffContext(Context context, Config config) {
init(context, config);
}
/**
* Put all processing in the context on hold.
* This is typically called from your application's <code>onPause()</code> method, and will
* stop all running graphs (closing their filters). If the context does not preserve frames on
* pause (see {@link #setPreserveFramesOnPause(boolean)}) all frames attached to this context
* are released.
*/
public void onPause() {
synchronized (mState) {
if (mState.current == State.STATE_RUNNING) {
if (mCameraStreamer != null) {
mCameraStreamer.halt();
}
stopRunners(true);
mState.current = State.STATE_PAUSED;
}
}
}
/**
* Resumes the processing in this context.
* This is typically called from the application's <code>onResume()</code> method, and will
* resume processing any of the previously stopped filter graphs.
*/
public void onResume() {
synchronized (mState) {
if (mState.current == State.STATE_PAUSED) {
resumeRunners();
resumeCamera();
mState.current = State.STATE_RUNNING;
}
}
}
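// A minimal usage sketch (not part of the original file): forwarding an Activity's lifecycle
// to the MffContext. The activity class and field names are hypothetical:
//
//   public class CameraActivity extends Activity {
//       private MffContext mMffContext;
//
//       @Override
//       protected void onCreate(Bundle savedInstanceState) {
//           super.onCreate(savedInstanceState);
//           mMffContext = new MffContext(this);
//       }
//
//       @Override
//       protected void onPause() {
//           super.onPause();
//           mMffContext.onPause();
//       }
//
//       @Override
//       protected void onResume() {
//           super.onResume();
//           mMffContext.onResume();
//       }
//
//       @Override
//       protected void onDestroy() {
//           super.onDestroy();
//           mMffContext.release();
//       }
//   }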
/**
* Release all resources associated with this context.
* This will also stop any running graphs.
*/
public void release() {
synchronized (mState) {
if (mState.current != State.STATE_DESTROYED) {
if (mCameraStreamer != null) {
mCameraStreamer.stop();
mCameraStreamer.tearDown();
}
if (Build.VERSION.SDK_INT >= 11) {
maybeDestroyRenderScript();
}
stopRunners(false);
waitUntilStopped();
tearDown();
mState.current = State.STATE_DESTROYED;
}
}
}
/**
* Set whether frames are preserved when the context is paused.
* When set to false, all Frames associated with this context are released when the context is
* paused. The default value is false.
*
* @param preserve true, to preserve frames when the context is paused.
*
* @see #getPreserveFramesOnPause()
*/
public void setPreserveFramesOnPause(boolean preserve) {
mPreserveFramesOnPause = preserve;
}
/**
* Returns whether frames are preserved when the context is paused.
*
* @return true, if frames are preserved when the context is paused.
*
* @see #setPreserveFramesOnPause(boolean)
*/
public boolean getPreserveFramesOnPause() {
return mPreserveFramesOnPause;
}
/**
* Returns the application context that the MffContext is attached to.
*
* @return The application context for this context.
*/
public Context getApplicationContext() {
return mApplicationContext;
}
/**
* Returns the context's shared CameraStreamer.
* Use the CameraStreamer to control the Camera. Frames from the Camera are typically streamed
* to CameraSource filters.
*
* @return The context's CameraStreamer instance.
*/
public CameraStreamer getCameraStreamer() {
if (mCameraStreamer == null) {
mCameraStreamer = new CameraStreamer(this);
}
return mCameraStreamer;
}
/**
* Set the default EGL config chooser.
*
* When an EGL context is required by the MFF, the channel sizes specified here are used. The
* default sizes are 8 bits per R,G,B,A channel and 0 bits for depth and stencil channels.
*
* @param redSize The size of the red channel in bits.
* @param greenSize The size of the green channel in bits.
* @param blueSize The size of the blue channel in bits.
* @param alphaSize The size of the alpha channel in bits.
* @param depthSize The size of the depth channel in bits.
* @param stencilSize The size of the stencil channel in bits.
*/
public static void setEGLConfigChooser(int redSize,
int greenSize,
int blueSize,
int alphaSize,
int depthSize,
int stencilSize) {
RenderTarget.setEGLConfigChooser(redSize,
greenSize,
blueSize,
alphaSize,
depthSize,
stencilSize);
}
/**
* Returns true, if this context supports using OpenGL.
* @return true, if this context supports using OpenGL.
*/
public final boolean isOpenGLSupported() {
return mGLSupport;
}
/**
* Returns true, if this context supports camera streaming.
* @return true, if this context supports camera streaming.
*/
public final boolean isCameraStreamingSupported() {
return mCameraStreamingSupport;
}
@TargetApi(11)
public final RenderScript getRenderScript() {
if (mRenderScript == null) {
mRenderScript = RenderScript.create(mApplicationContext);
}
return mRenderScript;
}
final void assertOpenGLSupported() {
if (!isOpenGLSupported()) {
throw new RuntimeException("Attempting to use OpenGL ES 2 in a context that does not "
+ "support it!");
}
}
void addGraph(FilterGraph graph) {
synchronized (mGraphs) {
mGraphs.add(graph);
}
}
void addRunner(GraphRunner runner) {
synchronized (mRunners) {
mRunners.add(runner);
}
}
SurfaceView getDummySurfaceView() {
return mDummySurfaceView;
}
void postRunnable(Runnable runnable) {
mHandler.post(runnable);
}
private void init(Context context, Config config) {
determineGLSupport(context, config);
determineCameraSupport(config);
createHandler();
mApplicationContext = context.getApplicationContext();
fetchDummySurfaceView(context, config);
}
private void fetchDummySurfaceView(Context context, Config config) {
if (config.requireCamera && CameraStreamer.requireDummySurfaceView()) {
mDummySurfaceView = config.dummySurface != null
? config.dummySurface
: createDummySurfaceView(context);
}
}
private void determineGLSupport(Context context, Config config) {
if (config.forceNoGL) {
mGLSupport = false;
} else {
mGLSupport = getPlatformSupportsGLES2(context);
if (config.requireOpenGL && !mGLSupport) {
throw new RuntimeException("Cannot create context that requires GL support on "
+ "this platform!");
}
}
}
private void determineCameraSupport(Config config) {
mCameraStreamingSupport = (CameraStreamer.getNumberOfCameras() > 0);
if (config.requireCamera && !mCameraStreamingSupport) {
throw new RuntimeException("Cannot create context that requires a camera on "
+ "this platform!");
}
}
private static boolean getPlatformSupportsGLES2(Context context) {
ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
ConfigurationInfo configurationInfo = am.getDeviceConfigurationInfo();
return configurationInfo.reqGlEsVersion >= 0x20000;
}
private void createHandler() {
if (Looper.myLooper() == null) {
throw new RuntimeException("MffContext must be created in a thread with a Looper!");
}
mHandler = new Handler();
}
private void stopRunners(boolean haltOnly) {
synchronized (mRunners) {
// Halt all runners (does nothing if not running)
for (GraphRunner runner : mRunners) {
if (haltOnly) {
runner.halt();
} else {
runner.stop();
}
}
// Flush all graphs if requested (this is queued up after the call to halt)
if (!mPreserveFramesOnPause) {
for (GraphRunner runner : mRunners) {
runner.flushFrames();
}
}
}
}
private void resumeRunners() {
synchronized (mRunners) {
for (GraphRunner runner : mRunners) {
runner.restart();
}
}
}
private void resumeCamera() {
// Restart only affects previously halted cameras that were running.
if (mCameraStreamer != null) {
mCameraStreamer.restart();
}
}
private void waitUntilStopped() {
for (GraphRunner runner : mRunners) {
runner.waitUntilStop();
}
}
private void tearDown() {
// Tear down graphs
for (FilterGraph graph : mGraphs) {
graph.tearDown();
}
// Tear down runners
for (GraphRunner runner : mRunners) {
runner.tearDown();
}
}
@SuppressWarnings("deprecation")
private SurfaceView createDummySurfaceView(Context context) {
// This is only called on Gingerbread devices, so deprecation warning is unnecessary.
SurfaceView dummySurfaceView = new SurfaceView(context);
dummySurfaceView.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
// If we have an activity for this context we'll add the SurfaceView to it (as a 1x1 view
// in the top-left corner). If not, we warn the user that they may need to add one manually.
Activity activity = findActivityForContext(context);
if (activity != null) {
ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(1, 1);
activity.addContentView(dummySurfaceView, params);
} else {
Log.w("MffContext", "Could not find activity for dummy surface! Consider specifying "
+ "your own SurfaceView!");
}
return dummySurfaceView;
}
private Activity findActivityForContext(Context context) {
return (context instanceof Activity) ? (Activity) context : null;
}
@TargetApi(11)
private void maybeDestroyRenderScript() {
if (mRenderScript != null) {
mRenderScript.destroy();
mRenderScript = null;
}
}
}

View File

@ -0,0 +1,92 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Make values from a motion sensor (e.g., accelerometer) available as filter outputs.
package androidx.media.filterpacks.sensors;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.FrameValues;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.OutputPort;
import androidx.media.filterfw.Signature;
public final class MotionSensor extends Filter implements SensorEventListener {
private SensorManager mSensorManager = null;
private Sensor mSensor = null;
private float[] mValues = new float[3];
public MotionSensor(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
return new Signature()
.addOutputPort("values", Signature.PORT_REQUIRED, FrameType.array(float.class))
.disallowOtherPorts();
}
@Override
protected void onPrepare() {
mSensorManager = (SensorManager)getContext().getApplicationContext()
.getSystemService(Context.SENSOR_SERVICE);
mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_LINEAR_ACCELERATION);
// TODO: currently, the type of sensor is hardcoded. Should be able to set the sensor
// type as filter input!
mSensorManager.registerListener(this, mSensor, SensorManager.SENSOR_DELAY_UI);
}
@Override
protected void onTearDown() {
mSensorManager.unregisterListener(this);
}
@Override
public final void onAccuracyChanged(Sensor sensor, int accuracy) {
// (Do we need to do something when sensor accuracy changes?)
}
@Override
public final void onSensorChanged(SensorEvent event) {
synchronized(mValues) {
mValues[0] = event.values[0];
mValues[1] = event.values[1];
mValues[2] = event.values[2];
}
}
@Override
protected void onProcess() {
OutputPort outPort = getConnectedOutputPort("values");
FrameValues outFrame = outPort.fetchAvailableFrame(null).asFrameValues();
synchronized(mValues) {
outFrame.setValues(mValues);
}
outFrame.setTimestamp(System.currentTimeMillis() * 1000000L);
outPort.pushFrame(outFrame);
}
}
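// A hedged sketch (not part of the original file) of wiring this filter into a graph XML;
// element names follow the graph reader conventions and the consuming filter "consumer" is
// hypothetical:
//
//   <filter class="androidx.media.filterpacks.sensors.MotionSensor" name="motion" />
//   <connect sourceFilter="motion" sourcePort="values"
//            targetFilter="consumer" targetPort="motionValues" />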

View File

@ -0,0 +1,117 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Extract histogram from image.
package androidx.media.filterpacks.histogram;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.Frame;
import androidx.media.filterfw.FrameBuffer2D;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.InputPort;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.OutputPort;
import androidx.media.filterfw.Signature;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
* NewChromaHistogramFilter takes in an image in HSVA format and computes a 2-D histogram: a
* two-dimensional chroma histogram indexed by hue (columns) and saturation (rows) at the top, and
* a 1-D value histogram in the last row. The number of bins in the value histogram equals
* the number of bins in hue.
*/
public final class NewChromaHistogramFilter extends Filter {
private int mHueBins = 6;
private int mSaturationBins = 3;
private int mValueBins;
private int mSaturationThreshold = 26; // 255 * 0.1
private int mValueThreshold = 51; // 255 * 0.2
public NewChromaHistogramFilter(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU);
FrameType dataOut = FrameType.buffer2D(FrameType.ELEMENT_FLOAT32);
return new Signature()
.addInputPort("image", Signature.PORT_REQUIRED, imageIn)
.addInputPort("huebins", Signature.PORT_OPTIONAL, FrameType.single(int.class))
.addInputPort("saturationbins", Signature.PORT_OPTIONAL, FrameType.single(int.class))
.addInputPort("saturationthreshold", Signature.PORT_OPTIONAL,
FrameType.single(int.class))
.addInputPort("valuethreshold", Signature.PORT_OPTIONAL, FrameType.single(int.class))
.addOutputPort("histogram", Signature.PORT_REQUIRED, dataOut)
.disallowOtherPorts();
}
@Override
public void onInputPortOpen(InputPort port) {
if (port.getName().equals("huebins")) {
port.bindToFieldNamed("mHueBins");
port.setAutoPullEnabled(true);
} else if (port.getName().equals("saturationbins")) {
port.bindToFieldNamed("mSaturationBins");
port.setAutoPullEnabled(true);
} else if (port.getName().equals("saturationthreshold")) {
port.bindToFieldNamed("mSaturationThreshold");
port.setAutoPullEnabled(true);
} else if (port.getName().equals("valuethreshold")) {
port.bindToFieldNamed("mValueThreshold");
port.setAutoPullEnabled(true);
}
}
@Override
protected void onProcess() {
FrameBuffer2D imageFrame = getConnectedInputPort("image").pullFrame().asFrameImage2D();
OutputPort outPort = getConnectedOutputPort("histogram");
mValueBins = mHueBins;
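// Output layout: mSaturationBins rows of hue-by-saturation chroma bins, plus one extra row
// holding the 1-D value histogram (hence mSaturationBins + 1 rows).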
int[] outDims = new int[] {mHueBins, mSaturationBins + 1};
FrameBuffer2D histogramFrame = outPort.fetchAvailableFrame(outDims).asFrameBuffer2D();
ByteBuffer imageBuffer = imageFrame.lockBytes(Frame.MODE_READ);
ByteBuffer histogramBuffer = histogramFrame.lockBytes(Frame.MODE_WRITE);
histogramBuffer.order(ByteOrder.nativeOrder());
FloatBuffer floatHistogram = histogramBuffer.asFloatBuffer();
// Run native method
extractChromaHistogram(imageBuffer, floatHistogram, mHueBins, mSaturationBins, mValueBins,
mSaturationThreshold, mValueThreshold);
imageFrame.unlock();
histogramFrame.unlock();
outPort.pushFrame(histogramFrame);
}
private static native void extractChromaHistogram(ByteBuffer imageBuffer,
FloatBuffer histogramBuffer, int hueBins, int saturationBins, int valueBins,
int saturationThreshold, int valueThreshold);
static {
System.loadLibrary("smartcamera_jni");
}
}

View File

@ -0,0 +1,63 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.numeric;
import android.util.Log;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.FrameValue;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.OutputPort;
import androidx.media.filterfw.Signature;
/**
* Filter to calculate the 2-norm of the inputs. i.e. sqrt(x^2 + y^2)
* TODO: Add support for more norms in the future.
*/
public final class NormFilter extends Filter {
private static final String TAG = "NormFilter";
private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
public NormFilter(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
FrameType floatT = FrameType.single(float.class);
return new Signature()
.addInputPort("x", Signature.PORT_REQUIRED, floatT)
.addInputPort("y", Signature.PORT_REQUIRED, floatT)
.addOutputPort("norm", Signature.PORT_REQUIRED, floatT)
.disallowOtherPorts();
}
@Override
protected void onProcess() {
FrameValue xFrameValue = getConnectedInputPort("x").pullFrame().asFrameValue();
float xValue = ((Float)xFrameValue.getValue()).floatValue();
FrameValue yFrameValue = getConnectedInputPort("y").pullFrame().asFrameValue();
float yValue = ((Float)yFrameValue.getValue()).floatValue();
float norm = (float) Math.hypot(xValue, yValue);
if (mLogVerbose) Log.v(TAG, "Norm = " + norm);
OutputPort outPort = getConnectedOutputPort("norm");
FrameValue outFrame = outPort.fetchAvailableFrame(null).asFrameValue();
outFrame.setValue(norm);
outPort.pushFrame(outFrame);
}
}

View File

@ -0,0 +1,193 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
/**
* Output ports are the data emitting ports of filters.
* <p>
* Filters push data frames onto output-ports, which in turn push them onto their connected input
* ports. Output ports must be connected to an input port before data can be pushed onto them.
* Input and output ports share their Frame slot, meaning that when a frame is waiting on an output
* port, it is also waiting on the connected input port.
* </p><p>
* Only one frame can be pushed onto an output port at a time. In other words, a Frame must first
* be consumed by the target filter before a new frame can be pushed on the output port. If the
* output port is set to wait until it becomes free (see {@link #setWaitsUntilAvailable(boolean)}),
* it is guaranteed to be available when {@code onProcess()} is called. This is the default setting.
* </p>
*/
public final class OutputPort {
private Filter mFilter;
private String mName;
private Signature.PortInfo mInfo;
private FrameQueue.Builder mQueueBuilder = null;
private FrameQueue mQueue = null;
private boolean mWaitsUntilAvailable = true;
private InputPort mTarget = null;
/**
* Returns true, if this port is connected to a target port.
* @return true, if this port is connected to a target port.
*/
public boolean isConnected() {
return mTarget != null;
}
/**
* Returns true, if there is no frame waiting on this port.
* @return true, if no Frame instance is waiting on this port.
*/
public boolean isAvailable() {
return mQueue == null || mQueue.canPush();
}
/**
* Returns a frame for writing.
*
* Call this method to fetch a new frame to write into. When you have finished writing the
* frame data, you can push it into the output queue using {@link #pushFrame(Frame)}. Note,
* that the Frame returned is owned by the queue. If you wish to hold on to the frame, you
* must detach it.
*
* @param dimensions the size of the Frame you wish to obtain.
* @return a writable Frame instance.
*/
public Frame fetchAvailableFrame(int[] dimensions) {
Frame frame = getQueue().fetchAvailableFrame(dimensions);
if (frame != null) {
//Log.i("OutputPort", "Adding frame " + frame + " to auto-release pool");
mFilter.addAutoReleaseFrame(frame);
}
return frame;
}
/**
* Pushes a frame onto this output port.
*
* This is typically a Frame instance you obtained by previously calling
* {@link #fetchAvailableFrame(int[])}, but may come from other sources such as an input port
* that is attached to this output port.
*
* Once you have pushed a frame to an output, you may no longer modify it as it may be shared
* among other filters.
*
* @param frame the frame to push to the output queue.
*/
public void pushFrame(Frame frame) {
// Some queues allow pushing without fetching, so we need to make sure queue is open
// before pushing!
long timestamp = frame.getTimestamp();
if (timestamp == Frame.TIMESTAMP_NOT_SET)
frame.setTimestamp(mFilter.getCurrentTimestamp());
getQueue().pushFrame(frame);
}
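// Illustrative sketch of the typical fetch/write/push sequence inside a filter's onProcess();
// the port name "result" and the variable resultValue are made-up placeholders:
//
//     OutputPort outPort = getConnectedOutputPort("result");
//     FrameValue outFrame = outPort.fetchAvailableFrame(null).asFrameValue();
//     outFrame.setValue(resultValue);
//     outPort.pushFrame(outFrame);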
/**
* Sets whether to wait until this port becomes available before processing.
* When set to true, the Filter will not be scheduled for processing until this port is
* available, that is, until no Frame is waiting on it. The default value is true.
*
* @param wait true, if filter should wait for the port to become available before processing.
* @see #waitsUntilAvailable()
*/
public void setWaitsUntilAvailable(boolean wait) {
mWaitsUntilAvailable = wait;
}
/**
* Returns whether the filter waits until this port is available before processing.
* @return true, if the filter waits until this port is available before processing.
* @see #setWaitsUntilAvailable(boolean)
*/
public boolean waitsUntilAvailable() {
return mWaitsUntilAvailable;
}
/**
* Returns the output port's name.
* This is the name that was specified when the output port was connected.
*
* @return the output port's name.
*/
public String getName() {
return mName;
}
/**
* Return the filter object that this port belongs to.
*
* @return the output port's filter.
*/
public Filter getFilter() {
return mFilter;
}
@Override
public String toString() {
return mFilter.getName() + ":" + mName;
}
OutputPort(Filter filter, String name, Signature.PortInfo info) {
mFilter = filter;
mName = name;
mInfo = info;
}
void setTarget(InputPort target) {
mTarget = target;
}
/**
* Return the (input) port that this output port is connected to.
*
* @return the connected port, null if not connected.
*/
public InputPort getTarget() {
return mTarget;
}
FrameQueue getQueue() {
return mQueue;
}
void setQueue(FrameQueue queue) {
mQueue = queue;
mQueueBuilder = null;
}
void onOpen(FrameQueue.Builder builder) {
mQueueBuilder = builder;
mQueueBuilder.setWriteType(mInfo.type);
mFilter.onOutputPortOpen(this);
}
boolean isOpen() {
return mQueue != null;
}
final boolean conditionsMet() {
return !mWaitsUntilAvailable || isAvailable();
}
void clear() {
if (mQueue != null) {
mQueue.clear();
}
}
}

View File

@ -0,0 +1,79 @@
/*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import java.nio.ByteBuffer;
/**
* A collection of utilities to deal with pixel operations on ByteBuffers.
*/
public class PixelUtils {
/**
* Copy pixels from one buffer to another, applying a transformation.
*
* <p>The transformation is specified by the initial offset into the output buffer, the stride
* (in pixels) between consecutive pixels, and the row stride (in pixels), i.e. the number of
* pixels between the start of one row and the start of the next.</p>
*
* <p>Note that this method is native for efficiency reasons. It does NOT do any bounds checking
* other than making sure the buffers are of sufficient size. This means that you can corrupt
* memory if specifying incorrect stride values!</p>
*
* @param input The input buffer containing pixel data.
* @param output The output buffer to hold the transformed pixel data.
* @param width The width of the input image.
* @param height The height of the input image.
* @param offset The start offset in the output (in pixels)
* @param pixStride The stride between each pixel (in pixels)
* @param rowStride The stride between the start of each row (in pixels)
*/
public static void copyPixels(ByteBuffer input,
ByteBuffer output,
int width,
int height,
int offset,
int pixStride,
int rowStride) {
if (input.remaining() != output.remaining()) {
throw new IllegalArgumentException("Input and output buffers must have the same size!");
} else if (input.remaining() % 4 != 0) {
throw new IllegalArgumentException("Input buffer size must be a multiple of 4!");
} else if (output.remaining() % 4 != 0) {
throw new IllegalArgumentException("Output buffer size must be a multiple of 4!");
} else if ((width * height * 4) != input.remaining()) {
throw new IllegalArgumentException(
"Input buffer size does not match given dimensions!");
} else if ((width * height * 4) != output.remaining()) {
throw new IllegalArgumentException(
"Output buffer size does not match given dimensions!");
}
nativeCopyPixels(input, output, width, height, offset, pixStride, rowStride);
}
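// Worked example: a plain row-major copy of a width x height image uses offset = 0,
// pixStride = 1 and rowStride = width, since consecutive pixels are adjacent and each row
// starts 'width' pixels after the previous one.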
private static native void nativeCopyPixels(ByteBuffer input,
ByteBuffer output,
int width,
int height,
int offset,
int pixStride,
int rowStride);
static {
System.loadLibrary("smartcamera_jni");
}
}

View File

@ -0,0 +1,444 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.media.MediaRecorder;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.os.Build.VERSION;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import java.nio.ByteBuffer;
import java.util.HashMap;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
public final class RenderTarget {
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
private static final int EGL_OPENGL_ES2_BIT = 4;
// Pre-HC devices do not necessarily support multiple display surfaces.
private static boolean mSupportsMultipleDisplaySurfaces = (VERSION.SDK_INT >= 11);
/** A Map that tracks which objects are wrapped by EGLSurfaces */
private static HashMap<Object, EGLSurface> mSurfaceSources = new HashMap<Object, EGLSurface>();
/** A Map for performing reference counting over shared objects across RenderTargets */
private static HashMap<Object, Integer> mRefCounts = new HashMap<Object, Integer>();
/** Stores the RenderTarget that is focused on the current thread. */
private static ThreadLocal<RenderTarget> mCurrentTarget = new ThreadLocal<RenderTarget>();
/** The source for the surface used in this target (if any) */
private Object mSurfaceSource = null;
/** The cached EGLConfig instance. */
private static EGLConfig mEglConfig = null;
/** The display for which the EGLConfig was chosen. We expect only one. */
private static EGLDisplay mConfiguredDisplay;
private EGL10 mEgl;
private EGLDisplay mDisplay;
private EGLContext mContext;
private EGLSurface mSurface;
private int mFbo;
private boolean mOwnsContext;
private boolean mOwnsSurface;
private static HashMap<EGLContext, ImageShader> mIdShaders
= new HashMap<EGLContext, ImageShader>();
private static HashMap<EGLContext, EGLSurface> mDisplaySurfaces
= new HashMap<EGLContext, EGLSurface>();
private static int sRedSize = 8;
private static int sGreenSize = 8;
private static int sBlueSize = 8;
private static int sAlphaSize = 8;
private static int sDepthSize = 0;
private static int sStencilSize = 0;
public static RenderTarget newTarget(int width, int height) {
EGL10 egl = (EGL10) EGLContext.getEGL();
EGLDisplay eglDisplay = createDefaultDisplay(egl);
EGLConfig eglConfig = chooseEglConfig(egl, eglDisplay);
EGLContext eglContext = createContext(egl, eglDisplay, eglConfig);
EGLSurface eglSurface = createSurface(egl, eglDisplay, width, height);
RenderTarget result = new RenderTarget(eglDisplay, eglContext, eglSurface, 0, true, true);
result.addReferenceTo(eglSurface);
return result;
}
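// Illustrative off-screen usage sketch:
//
//     RenderTarget target = RenderTarget.newTarget(width, height);
//     target.focus();                                  // make the pbuffer surface current
//     // ... issue GLES20 draw calls here ...
//     ByteBuffer pixels = target.getPixelData(width, height);
//     target.release();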
public static RenderTarget currentTarget() {
// As RenderTargets are immutable, we can safely return the last focused instance on this
// thread, as we know it cannot have changed, and therefore must be current.
return mCurrentTarget.get();
}
public RenderTarget forTexture(TextureSource texture, int width, int height) {
// NOTE: We do not need to lookup any previous bindings of this texture to an FBO, as
// multiple FBOs to a single texture is valid.
int fbo = GLToolbox.generateFbo();
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo);
GLToolbox.checkGlError("glBindFramebuffer");
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
GLES20.GL_COLOR_ATTACHMENT0,
texture.getTarget(),
texture.getTextureId(),
0);
GLToolbox.checkGlError("glFramebufferTexture2D");
return new RenderTarget(mDisplay, mContext, surface(), fbo, false, false);
}
public RenderTarget forSurfaceHolder(SurfaceHolder surfaceHolder) {
EGLConfig eglConfig = chooseEglConfig(mEgl, mDisplay);
EGLSurface eglSurf = null;
synchronized (mSurfaceSources) {
eglSurf = mSurfaceSources.get(surfaceHolder);
if (eglSurf == null) {
eglSurf = mEgl.eglCreateWindowSurface(mDisplay, eglConfig, surfaceHolder, null);
mSurfaceSources.put(surfaceHolder, eglSurf);
}
}
checkEglError(mEgl, "eglCreateWindowSurface");
checkSurface(mEgl, eglSurf);
RenderTarget result = new RenderTarget(mDisplay, mContext, eglSurf, 0, false, true);
result.addReferenceTo(eglSurf);
result.setSurfaceSource(surfaceHolder);
return result;
}
@TargetApi(11)
public RenderTarget forSurfaceTexture(SurfaceTexture surfaceTexture) {
EGLConfig eglConfig = chooseEglConfig(mEgl, mDisplay);
EGLSurface eglSurf = null;
synchronized (mSurfaceSources) {
eglSurf = mSurfaceSources.get(surfaceTexture);
if (eglSurf == null) {
eglSurf = mEgl.eglCreateWindowSurface(mDisplay, eglConfig, surfaceTexture, null);
mSurfaceSources.put(surfaceTexture, eglSurf);
}
}
checkEglError(mEgl, "eglCreateWindowSurface");
checkSurface(mEgl, eglSurf);
RenderTarget result = new RenderTarget(mDisplay, mContext, eglSurf, 0, false, true);
result.setSurfaceSource(surfaceTexture);
result.addReferenceTo(eglSurf);
return result;
}
@TargetApi(11)
public RenderTarget forSurface(Surface surface) {
EGLConfig eglConfig = chooseEglConfig(mEgl, mDisplay);
EGLSurface eglSurf = null;
synchronized (mSurfaceSources) {
eglSurf = mSurfaceSources.get(surface);
if (eglSurf == null) {
eglSurf = mEgl.eglCreateWindowSurface(mDisplay, eglConfig, surface, null);
mSurfaceSources.put(surface, eglSurf);
}
}
checkEglError(mEgl, "eglCreateWindowSurface");
checkSurface(mEgl, eglSurf);
RenderTarget result = new RenderTarget(mDisplay, mContext, eglSurf, 0, false, true);
result.setSurfaceSource(surface);
result.addReferenceTo(eglSurf);
return result;
}
public static RenderTarget forMediaRecorder(MediaRecorder mediaRecorder) {
throw new RuntimeException("Not yet implemented MediaRecorder -> RenderTarget!");
}
public static void setEGLConfigChooser(int redSize, int greenSize, int blueSize, int alphaSize,
int depthSize, int stencilSize) {
sRedSize = redSize;
sGreenSize = greenSize;
sBlueSize = blueSize;
sAlphaSize = alphaSize;
sDepthSize = depthSize;
sStencilSize = stencilSize;
}
public void registerAsDisplaySurface() {
if (!mSupportsMultipleDisplaySurfaces) {
// Note that while this does in effect change RenderTarget instances (by modifying
// their returned EGLSurface), breaking the immutability requirement, it does not modify
// the current target. This is important so that the instance returned in
// currentTarget() remains accurate.
EGLSurface currentSurface = mDisplaySurfaces.get(mContext);
if (currentSurface != null && !currentSurface.equals(mSurface)) {
throw new RuntimeException("This device supports only a single display surface!");
} else {
mDisplaySurfaces.put(mContext, mSurface);
}
}
}
public void unregisterAsDisplaySurface() {
if (!mSupportsMultipleDisplaySurfaces) {
mDisplaySurfaces.put(mContext, null);
}
}
public void focus() {
RenderTarget current = mCurrentTarget.get();
// We assume RenderTargets are immutable, so that we do not need to focus if the current
// RenderTarget has not changed.
if (current != this) {
mEgl.eglMakeCurrent(mDisplay, surface(), surface(), mContext);
mCurrentTarget.set(this);
}
if (getCurrentFbo() != mFbo) {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFbo);
GLToolbox.checkGlError("glBindFramebuffer");
}
}
public static void focusNone() {
EGL10 egl = (EGL10) EGLContext.getEGL();
egl.eglMakeCurrent(egl.eglGetCurrentDisplay(),
EGL10.EGL_NO_SURFACE,
EGL10.EGL_NO_SURFACE,
EGL10.EGL_NO_CONTEXT);
mCurrentTarget.set(null);
checkEglError(egl, "eglMakeCurrent");
}
public void swapBuffers() {
mEgl.eglSwapBuffers(mDisplay, surface());
}
public EGLContext getContext() {
return mContext;
}
public static EGLContext currentContext() {
RenderTarget current = RenderTarget.currentTarget();
return current != null ? current.getContext() : EGL10.EGL_NO_CONTEXT;
}
public void release() {
if (mOwnsContext) {
mEgl.eglDestroyContext(mDisplay, mContext);
mContext = EGL10.EGL_NO_CONTEXT;
}
if (mOwnsSurface) {
synchronized (mSurfaceSources) {
if (removeReferenceTo(mSurface)) {
mEgl.eglDestroySurface(mDisplay, mSurface);
mSurface = EGL10.EGL_NO_SURFACE;
mSurfaceSources.remove(mSurfaceSource);
}
}
}
if (mFbo != 0) {
GLToolbox.deleteFbo(mFbo);
}
}
public void readPixelData(ByteBuffer pixels, int width, int height) {
GLToolbox.readTarget(this, pixels, width, height);
}
public ByteBuffer getPixelData(int width, int height) {
ByteBuffer pixels = ByteBuffer.allocateDirect(width * height * 4);
GLToolbox.readTarget(this, pixels, width, height);
return pixels;
}
/**
* Returns an identity shader for this context.
* You must not modify this shader. Use {@link ImageShader#createIdentity()} if you need to
* modify an identity shader.
*/
public ImageShader getIdentityShader() {
ImageShader idShader = mIdShaders.get(mContext);
if (idShader == null) {
idShader = ImageShader.createIdentity();
mIdShaders.put(mContext, idShader);
}
return idShader;
}
@Override
public String toString() {
return "RenderTarget(" + mDisplay + ", " + mContext + ", " + mSurface + ", " + mFbo + ")";
}
private void setSurfaceSource(Object source) {
mSurfaceSource = source;
}
private void addReferenceTo(Object object) {
Integer refCount = mRefCounts.get(object);
if (refCount != null) {
mRefCounts.put(object, refCount + 1);
} else {
mRefCounts.put(object, 1);
}
}
private boolean removeReferenceTo(Object object) {
Integer refCount = mRefCounts.get(object);
if (refCount != null && refCount > 0) {
--refCount;
mRefCounts.put(object, refCount);
return refCount == 0;
} else {
Log.e("RenderTarget", "Removing reference of already released: " + object + "!");
return false;
}
}
private static EGLConfig chooseEglConfig(EGL10 egl, EGLDisplay display) {
if (mEglConfig == null || !display.equals(mConfiguredDisplay)) {
int[] configsCount = new int[1];
EGLConfig[] configs = new EGLConfig[1];
int[] configSpec = getDesiredConfig();
if (!egl.eglChooseConfig(display, configSpec, configs, 1, configsCount)) {
throw new IllegalArgumentException("EGL Error: eglChooseConfig failed " +
getEGLErrorString(egl, egl.eglGetError()));
} else if (configsCount[0] > 0) {
mEglConfig = configs[0];
mConfiguredDisplay = display;
}
}
return mEglConfig;
}
private static int[] getDesiredConfig() {
return new int[] {
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_RED_SIZE, sRedSize,
EGL10.EGL_GREEN_SIZE, sGreenSize,
EGL10.EGL_BLUE_SIZE, sBlueSize,
EGL10.EGL_ALPHA_SIZE, sAlphaSize,
EGL10.EGL_DEPTH_SIZE, sDepthSize,
EGL10.EGL_STENCIL_SIZE, sStencilSize,
EGL10.EGL_NONE
};
}
private RenderTarget(EGLDisplay display, EGLContext context, EGLSurface surface, int fbo,
boolean ownsContext, boolean ownsSurface) {
mEgl = (EGL10) EGLContext.getEGL();
mDisplay = display;
mContext = context;
mSurface = surface;
mFbo = fbo;
mOwnsContext = ownsContext;
mOwnsSurface = ownsSurface;
}
private EGLSurface surface() {
if (mSupportsMultipleDisplaySurfaces) {
return mSurface;
} else {
EGLSurface displaySurface = mDisplaySurfaces.get(mContext);
return displaySurface != null ? displaySurface : mSurface;
}
}
private static void initEgl(EGL10 egl, EGLDisplay display) {
int[] version = new int[2];
if (!egl.eglInitialize(display, version)) {
throw new RuntimeException("EGL Error: eglInitialize failed " +
getEGLErrorString(egl, egl.eglGetError()));
}
}
private static EGLDisplay createDefaultDisplay(EGL10 egl) {
EGLDisplay display = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
checkDisplay(egl, display);
initEgl(egl, display);
return display;
}
private static EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig config) {
int[] attrib_list = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
EGLContext ctxt = egl.eglCreateContext(display, config, EGL10.EGL_NO_CONTEXT, attrib_list);
checkContext(egl, ctxt);
return ctxt;
}
private static EGLSurface createSurface(EGL10 egl, EGLDisplay display, int width, int height) {
EGLConfig eglConfig = chooseEglConfig(egl, display);
int[] attribs = { EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE };
return egl.eglCreatePbufferSurface(display, eglConfig, attribs);
}
private static int getCurrentFbo() {
int[] result = new int[1];
GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, result, 0);
return result[0];
}
private static void checkDisplay(EGL10 egl, EGLDisplay display) {
if (display == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("EGL Error: Bad display: "
+ getEGLErrorString(egl, egl.eglGetError()));
}
}
private static void checkContext(EGL10 egl, EGLContext context) {
if (context == EGL10.EGL_NO_CONTEXT) {
throw new RuntimeException("EGL Error: Bad context: "
+ getEGLErrorString(egl, egl.eglGetError()));
}
}
private static void checkSurface(EGL10 egl, EGLSurface surface) {
if (surface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("EGL Error: Bad surface: "
+ getEGLErrorString(egl, egl.eglGetError()));
}
}
private static void checkEglError(EGL10 egl, String command) {
int error = egl.eglGetError();
if (error != EGL10.EGL_SUCCESS) {
throw new RuntimeException("Error executing " + command + "! EGL error = 0x"
+ Integer.toHexString(error));
}
}
private static String getEGLErrorString(EGL10 egl, int eglError) {
if (VERSION.SDK_INT >= 14) {
return getEGLErrorStringICS(egl, eglError);
} else {
return "EGL Error 0x" + Integer.toHexString(eglError);
}
}
@TargetApi(14)
private static String getEGLErrorStringICS(EGL10 egl, int eglError) {
return GLUtils.getEGLErrorString(eglError);
}
}

View File

@ -0,0 +1,41 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.transform;
import androidx.media.filterfw.*;
// TODO: In the future this could be done with a meta-filter that simply "hard-codes" the crop
// parameters.
public class ResizeFilter extends CropFilter {
public ResizeFilter(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
return new Signature()
.addInputPort("image", Signature.PORT_REQUIRED, imageIn)
.addInputPort("outputWidth", Signature.PORT_OPTIONAL, FrameType.single(int.class))
.addInputPort("outputHeight", Signature.PORT_OPTIONAL, FrameType.single(int.class))
.addInputPort("useMipmaps", Signature.PORT_OPTIONAL, FrameType.single(boolean.class))
.addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
.disallowOtherPorts();
}
}

View File

@ -0,0 +1,80 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.transform;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.FrameImage2D;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.ImageShader;
import androidx.media.filterfw.InputPort;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.OutputPort;
import androidx.media.filterfw.Signature;
import androidx.media.filterfw.geometry.Quad;
public class RotateFilter extends Filter {
private Quad mSourceRect = Quad.fromRect(0f, 0f, 1f, 1f);
private float mRotateAngle = 0;
private ImageShader mShader;
public RotateFilter(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
return new Signature()
.addInputPort("image", Signature.PORT_REQUIRED, imageIn)
.addInputPort("rotateAngle", Signature.PORT_REQUIRED, FrameType.single(float.class))
.addInputPort("sourceRect", Signature.PORT_OPTIONAL, FrameType.single(Quad.class))
.addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
.disallowOtherPorts();
}
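// Note: "rotateAngle" is expected in degrees; onProcess() converts it to radians before
// rotating the source quad.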
@Override
public void onInputPortOpen(InputPort port) {
if (port.getName().equals("rotateAngle")) {
port.bindToFieldNamed("mRotateAngle");
port.setAutoPullEnabled(true);
} else if (port.getName().equals("sourceRect")) {
port.bindToFieldNamed("mSourceRect");
port.setAutoPullEnabled(true);
}
}
@Override
protected void onPrepare() {
mShader = ImageShader.createIdentity();
}
@Override
protected void onProcess() {
OutputPort outPort = getConnectedOutputPort("image");
FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
int[] inDims = inputImage.getDimensions();
FrameImage2D outputImage = outPort.fetchAvailableFrame(inDims).asFrameImage2D();
mShader.setSourceQuad(mSourceRect);
Quad targetQuad = mSourceRect.rotated((float) (mRotateAngle / 180 * Math.PI));
mShader.setTargetQuad(targetQuad);
mShader.process(inputImage, outputImage);
outPort.pushFrame(outputImage);
}
}

View File

@ -0,0 +1,66 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.transform;
// TODO: scale filter needs to be able to specify output width and height
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.InputPort;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.Signature;
public class ScaleFilter extends ResizeFilter {
private float mScale = 1.0f;
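// A scale of 0.5f, for instance, halves both output dimensions, as computed by
// getOutputWidth() and getOutputHeight() below.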
public ScaleFilter(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
return new Signature()
.addInputPort("image", Signature.PORT_REQUIRED, imageIn)
.addInputPort("scale", Signature.PORT_OPTIONAL, FrameType.single(float.class))
.addInputPort("useMipmaps", Signature.PORT_OPTIONAL, FrameType.single(boolean.class))
.addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
.disallowOtherPorts();
}
@Override
public void onInputPortOpen(InputPort port) {
if (port.getName().equals("scale")) {
port.bindToFieldNamed("mScale");
port.setAutoPullEnabled(true);
} else if (port.getName().equals("useMipmaps")) {
port.bindToFieldNamed("mUseMipmaps");
port.setAutoPullEnabled(true);
}
}
@Override
protected int getOutputWidth(int inWidth, int inHeight) {
return (int)(inWidth * mScale);
}
@Override
protected int getOutputHeight(int inWidth, int inHeight) {
return (int)(inHeight * mScale);
}
}

View File

@ -0,0 +1,241 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map.Entry;
import java.util.Set;
/**
* A Signature holds the specification for a filter's input and output ports.
*
* A Signature instance must be returned by the filter's {@link Filter#getSignature()} method. It
* specifies the number and names of the filter's input and output ports, whether or not they
* are required, how data for those ports are accessed, and more. A Signature does not change over
* time. This makes Signatures useful for understanding how a filter can be integrated into a
* graph.
*
* There are a number of flags that can be specified for each input and output port. The flag
* {@code PORT_REQUIRED} indicates that the user must connect the specified port. On the other hand,
* {@code PORT_OPTIONAL} indicates that a port may be connected by the user.
*
* If ports other than the ones in the Signature are allowed, they default to the most generic
* format, which allows passing in any type of Frame. Thus, if more granular access to a
* frame's data is needed, it must be specified in the Signature.
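* <p>For illustration, a filter with one required image input and one required image output
* might implement {@link Filter#getSignature()} as:</p>
* <pre>{@code
* FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
* FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
* return new Signature()
*     .addInputPort("image", Signature.PORT_REQUIRED, imageIn)
*     .addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
*     .disallowOtherPorts();
* }</pre>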
*/
public class Signature {
private HashMap<String, PortInfo> mInputPorts = null;
private HashMap<String, PortInfo> mOutputPorts = null;
private boolean mAllowOtherInputs = true;
private boolean mAllowOtherOutputs = true;
static class PortInfo {
public int flags;
public FrameType type;
public PortInfo() {
flags = 0;
type = FrameType.any();
}
public PortInfo(int flags, FrameType type) {
this.flags = flags;
this.type = type;
}
public boolean isRequired() {
return (flags & PORT_REQUIRED) != 0;
}
public String toString(String ioMode, String name) {
String ioName = ioMode + " " + name;
String modeName = isRequired() ? "required" : "optional";
return modeName + " " + ioName + ": " + type.toString();
}
}
/** Indicates that the port must be connected in the graph. */
public static final int PORT_REQUIRED = 0x02;
/** Indicates that the port may be connected in the graph. */
public static final int PORT_OPTIONAL = 0x01;
/**
* Creates a new empty Signature.
*/
public Signature() {
}
/**
* Adds an input port to the Signature.
*
* @param name the name of the input port. Must be unique among input port names.
* @param flags a combination of port flags.
* @param type the type of the input frame.
* @return this Signature instance.
*/
public Signature addInputPort(String name, int flags, FrameType type) {
addInputPort(name, new PortInfo(flags, type));
return this;
}
/**
* Adds an output port to the Signature.
*
* @param name the name of the output port. Must be unique among output port names.
* @param flags a combination of port flags.
* @param type the type of the output frame.
* @return this Signature instance.
*/
public Signature addOutputPort(String name, int flags, FrameType type) {
addOutputPort(name, new PortInfo(flags, type));
return this;
}
/**
* Disallows the user from adding any other input ports.
* Adding any input port not explicitly specified in this Signature will cause an error.
* @return this Signature instance.
*/
public Signature disallowOtherInputs() {
mAllowOtherInputs = false;
return this;
}
/**
* Disallows the user from adding any other output ports.
* Adding any output port not explicitly specified in this Signature will cause an error.
* @return this Signature instance.
*/
public Signature disallowOtherOutputs() {
mAllowOtherOutputs = false;
return this;
}
/**
* Disallows the user from adding any other ports.
* Adding any input or output port not explicitly specified in this Signature will cause an
* error.
* @return this Signature instance.
*/
public Signature disallowOtherPorts() {
mAllowOtherInputs = false;
mAllowOtherOutputs = false;
return this;
}
@Override
public String toString() {
StringBuffer stringBuffer = new StringBuffer();
for (Entry<String, PortInfo> entry : mInputPorts.entrySet()) {
stringBuffer.append(entry.getValue().toString("input", entry.getKey()) + "\n");
}
for (Entry<String, PortInfo> entry : mOutputPorts.entrySet()) {
stringBuffer.append(entry.getValue().toString("output", entry.getKey()) + "\n");
}
if (!mAllowOtherInputs) {
stringBuffer.append("disallow other inputs\n");
}
if (!mAllowOtherOutputs) {
stringBuffer.append("disallow other outputs\n");
}
return stringBuffer.toString();
}
PortInfo getInputPortInfo(String name) {
PortInfo result = mInputPorts != null ? mInputPorts.get(name) : null;
return result != null ? result : new PortInfo();
}
PortInfo getOutputPortInfo(String name) {
PortInfo result = mOutputPorts != null ? mOutputPorts.get(name) : null;
return result != null ? result : new PortInfo();
}
void checkInputPortsConform(Filter filter) {
Set<String> filterInputs = new HashSet<String>();
filterInputs.addAll(filter.getConnectedInputPortMap().keySet());
if (mInputPorts != null) {
for (Entry<String, PortInfo> entry : mInputPorts.entrySet()) {
String portName = entry.getKey();
PortInfo portInfo = entry.getValue();
InputPort inputPort = filter.getConnectedInputPort(portName);
if (inputPort == null && portInfo.isRequired()) {
throw new RuntimeException("Filter " + filter + " does not have required "
+ "input port '" + portName + "'!");
}
filterInputs.remove(portName);
}
}
if (!mAllowOtherInputs && !filterInputs.isEmpty()) {
throw new RuntimeException("Filter " + filter + " has invalid input ports: "
+ filterInputs + "!");
}
}
void checkOutputPortsConform(Filter filter) {
Set<String> filterOutputs = new HashSet<String>();
filterOutputs.addAll(filter.getConnectedOutputPortMap().keySet());
if (mOutputPorts != null) {
for (Entry<String, PortInfo> entry : mOutputPorts.entrySet()) {
String portName = entry.getKey();
PortInfo portInfo = entry.getValue();
OutputPort outputPort = filter.getConnectedOutputPort(portName);
if (outputPort == null && portInfo.isRequired()) {
throw new RuntimeException("Filter " + filter + " does not have required "
+ "output port '" + portName + "'!");
}
filterOutputs.remove(portName);
}
}
if (!mAllowOtherOutputs && !filterOutputs.isEmpty()) {
throw new RuntimeException("Filter " + filter + " has invalid output ports: "
+ filterOutputs + "!");
}
}
HashMap<String, PortInfo> getInputPorts() {
return mInputPorts;
}
HashMap<String, PortInfo> getOutputPorts() {
return mOutputPorts;
}
private void addInputPort(String name, PortInfo portInfo) {
if (mInputPorts == null) {
mInputPorts = new HashMap<String, PortInfo>();
}
if (mInputPorts.containsKey(name)) {
throw new RuntimeException("Attempting to add duplicate input port '" + name + "'!");
}
mInputPorts.put(name, portInfo);
}
private void addOutputPort(String name, PortInfo portInfo) {
if (mOutputPorts == null) {
mOutputPorts = new HashMap<String, PortInfo>();
}
if (mOutputPorts.containsKey(name)) {
throw new RuntimeException("Attempting to add duplicate output port '" + name + "'!");
}
mOutputPorts.put(name, portInfo);
}
}

View File

@ -0,0 +1,38 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* This is a simple LRU cache that is used internally for managing repetitive objects.
*/
class SimpleCache<K, V> extends LinkedHashMap<K, V> {
private int mMaxEntries;
public SimpleCache(final int maxEntries) {
super(maxEntries + 1, 1f, true);
mMaxEntries = maxEntries;
}
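// Passing accessOrder = true to the LinkedHashMap constructor keeps entries in
// least-recently-used order, so removeEldestEntry() below evicts the LRU entry whenever the
// cache grows beyond mMaxEntries.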
@Override
protected boolean removeEldestEntry(final Map.Entry<K, V> eldest) {
return super.size() > mMaxEntries;
}
}

View File

@ -0,0 +1,36 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
public abstract class SlotFilter extends Filter {
protected final String mSlotName;
protected SlotFilter(MffContext context, String name, String slotName) {
super(context, name);
mSlotName = slotName;
}
protected final FrameType getSlotType() {
return getFrameManager().getSlot(mSlotName).getType();
}
protected final boolean slotHasFrame() {
return getFrameManager().getSlot(mSlotName).hasFrame();
}
}

View File

@ -0,0 +1,174 @@
/*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.image;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.Frame;
import androidx.media.filterfw.FrameImage2D;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.ImageShader;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.OutputPort;
import androidx.media.filterfw.Signature;
import java.nio.ByteBuffer;
public class SobelFilter extends Filter {
private static final String mGradientXSource =
"precision mediump float;\n"
+ "uniform sampler2D tex_sampler_0;\n"
+ "uniform vec2 pix;\n"
+ "varying vec2 v_texcoord;\n"
+ "void main() {\n"
+ " vec4 a1 = -1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(-pix.x, -pix.y));\n"
+ " vec4 a2 = -2.0 * texture2D(tex_sampler_0, v_texcoord + vec2(-pix.x, 0.0));\n"
+ " vec4 a3 = -1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(-pix.x, +pix.y));\n"
+ " vec4 b1 = +1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(+pix.x, -pix.y));\n"
+ " vec4 b2 = +2.0 * texture2D(tex_sampler_0, v_texcoord + vec2(+pix.x, 0.0));\n"
+ " vec4 b3 = +1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(+pix.x, +pix.y));\n"
+ " gl_FragColor = 0.5 + (a1 + a2 + a3 + b1 + b2 + b3) / 8.0;\n"
+ "}\n";
private static final String mGradientYSource =
"precision mediump float;\n"
+ "uniform sampler2D tex_sampler_0;\n"
+ "uniform vec2 pix;\n"
+ "varying vec2 v_texcoord;\n"
+ "void main() {\n"
+ " vec4 a1 = -1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(-pix.x, -pix.y));\n"
+ " vec4 a2 = -2.0 * texture2D(tex_sampler_0, v_texcoord + vec2(0.0, -pix.y));\n"
+ " vec4 a3 = -1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(+pix.x, -pix.y));\n"
+ " vec4 b1 = +1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(-pix.x, +pix.y));\n"
+ " vec4 b2 = +2.0 * texture2D(tex_sampler_0, v_texcoord + vec2(0.0, +pix.y));\n"
+ " vec4 b3 = +1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(+pix.x, +pix.y));\n"
+ " gl_FragColor = 0.5 + (a1 + a2 + a3 + b1 + b2 + b3) / 8.0;\n"
+ "}\n";
private static final String mMagnitudeSource =
"precision mediump float;\n"
+ "uniform sampler2D tex_sampler_0;\n"
+ "uniform sampler2D tex_sampler_1;\n"
+ "varying vec2 v_texcoord;\n"
+ "void main() {\n"
+ " vec4 gx = 2.0 * texture2D(tex_sampler_0, v_texcoord) - 1.0;\n"
+ " vec4 gy = 2.0 * texture2D(tex_sampler_1, v_texcoord) - 1.0;\n"
+ " gl_FragColor = vec4(sqrt(gx.rgb * gx.rgb + gy.rgb * gy.rgb), 1.0);\n"
+ "}\n";
private static final String mDirectionSource =
"precision mediump float;\n"
+ "uniform sampler2D tex_sampler_0;\n"
+ "uniform sampler2D tex_sampler_1;\n"
+ "varying vec2 v_texcoord;\n"
+ "void main() {\n"
+ " vec4 gy = 2.0 * texture2D(tex_sampler_1, v_texcoord) - 1.0;\n"
+ " vec4 gx = 2.0 * texture2D(tex_sampler_0, v_texcoord) - 1.0;\n"
+ " gl_FragColor = vec4((atan(gy.rgb, gx.rgb) + 3.14) / (2.0 * 3.14), 1.0);\n"
+ "}\n";
private ImageShader mGradientXShader;
private ImageShader mGradientYShader;
private ImageShader mMagnitudeShader;
private ImageShader mDirectionShader;
private FrameType mImageType;
public SobelFilter(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
// TODO: we will address the issue of READ_GPU / WRITE_GPU when using CPU filters later.
FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
return new Signature().addInputPort("image", Signature.PORT_REQUIRED, imageIn)
.addOutputPort("direction", Signature.PORT_OPTIONAL, imageOut)
.addOutputPort("magnitude", Signature.PORT_OPTIONAL, imageOut).disallowOtherPorts();
}
@Override
protected void onPrepare() {
if (isOpenGLSupported()) {
mGradientXShader = new ImageShader(mGradientXSource);
mGradientYShader = new ImageShader(mGradientYSource);
mMagnitudeShader = new ImageShader(mMagnitudeSource);
mDirectionShader = new ImageShader(mDirectionSource);
mImageType = FrameType.image2D(
FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU | FrameType.WRITE_GPU);
}
}
@Override
protected void onProcess() {
OutputPort magnitudePort = getConnectedOutputPort("magnitude");
OutputPort directionPort = getConnectedOutputPort("direction");
FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
int[] inputDims = inputImage.getDimensions();
FrameImage2D magImage = (magnitudePort != null) ?
magnitudePort.fetchAvailableFrame(inputDims).asFrameImage2D() : null;
FrameImage2D dirImage = (directionPort != null) ?
directionPort.fetchAvailableFrame(inputDims).asFrameImage2D() : null;
if (isOpenGLSupported()) {
FrameImage2D gxFrame = Frame.create(mImageType, inputDims).asFrameImage2D();
FrameImage2D gyFrame = Frame.create(mImageType, inputDims).asFrameImage2D();
mGradientXShader.setUniformValue("pix", new float[] {1f/inputDims[0], 1f/inputDims[1]});
mGradientYShader.setUniformValue("pix", new float[] {1f/inputDims[0], 1f/inputDims[1]});
mGradientXShader.process(inputImage, gxFrame);
mGradientYShader.process(inputImage, gyFrame);
FrameImage2D[] gradientFrames = new FrameImage2D[] { gxFrame, gyFrame };
if (magnitudePort != null) {
mMagnitudeShader.processMulti(gradientFrames, magImage);
}
if (directionPort != null) {
mDirectionShader.processMulti(gradientFrames, dirImage);
}
gxFrame.release();
gyFrame.release();
} else {
ByteBuffer inputBuffer = inputImage.lockBytes(Frame.MODE_READ);
ByteBuffer magBuffer = (magImage != null) ?
magImage.lockBytes(Frame.MODE_WRITE) : null;
ByteBuffer dirBuffer = (dirImage != null) ?
dirImage.lockBytes(Frame.MODE_WRITE) : null;
sobelOperator(inputImage.getWidth(), inputImage.getHeight(),
inputBuffer, magBuffer, dirBuffer);
inputImage.unlock();
if (magImage != null) {
magImage.unlock();
}
if (dirImage != null) {
dirImage.unlock();
}
}
if (magImage != null) {
magnitudePort.pushFrame(magImage);
}
if (dirImage != null) {
directionPort.pushFrame(dirImage);
}
}
private static native boolean sobelOperator(int width, int height,
ByteBuffer imageBuffer, ByteBuffer magBuffer, ByteBuffer dirBuffer);
static {
System.loadLibrary("smartcamera_jni");
}
}

View File

@ -0,0 +1,120 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Calculates the mean and standard deviation of the values in the input image.
// It takes in an RGBA image, but assumes that r, g, b, a are all the same values.
package androidx.media.filterpacks.numeric;
import android.util.Log;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.Frame;
import androidx.media.filterfw.FrameBuffer2D;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.FrameValue;
import androidx.media.filterfw.InputPort;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.OutputPort;
import androidx.media.filterfw.Signature;
import androidx.media.filterfw.geometry.Quad;
import java.nio.ByteBuffer;
/**
* Computes the sample mean and standard deviation of a 2-D buffer of bytes over a given rectangle.
* TODO: Add more statistics as needed.
* TODO: Check whether the crop rectangle needs to be part of this filter.
*/
public class StatsFilter extends Filter {
private static final int MEAN_INDEX = 0;
private static final int STDEV_INDEX = 1;
private final float[] mStats = new float[2];
private Quad mCropRect = Quad.fromRect(0f, 0f, 1f, 1f);
private static final String TAG = "StatsFilter";
private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
/**
* @param context
* @param name
*/
public StatsFilter(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
FrameType inputFrame = FrameType.buffer2D(FrameType.ELEMENT_INT8);
FrameType floatT = FrameType.single(float.class);
return new Signature()
.addInputPort("buffer", Signature.PORT_REQUIRED, inputFrame)
.addInputPort("cropRect", Signature.PORT_OPTIONAL, FrameType.single(Quad.class))
.addOutputPort("mean", Signature.PORT_REQUIRED, floatT)
.addOutputPort("stdev", Signature.PORT_REQUIRED, floatT)
.disallowOtherPorts();
}
@Override
public void onInputPortOpen(InputPort port) {
if (port.getName().equals("cropRect")) {
port.bindToFieldNamed("mCropRect");
port.setAutoPullEnabled(true);
}
}
private void calcMeanAndStd(ByteBuffer pixelBuffer, int width, int height, Quad quad) {
// Native
pixelBuffer.rewind();
regionscore(pixelBuffer, width, height, quad.topLeft().x, quad.topLeft().y,
quad.bottomRight().x, quad.bottomRight().y, mStats);
if (mLogVerbose) {
Log.v(TAG, "Native calc stats: Mean = " + mStats[MEAN_INDEX] + ", Stdev = "
+ mStats[STDEV_INDEX]);
}
}
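// The native regionscore() call presumably fills mStats with the sample mean and standard
// deviation of the pixel values inside the crop quad (the JNI implementation is not shown
// here); the verbose log above reports both values.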
/**
* @see androidx.media.filterfw.Filter#onProcess()
*/
@Override
protected void onProcess() {
FrameBuffer2D inputFrame = getConnectedInputPort("buffer").pullFrame().asFrameImage2D();
ByteBuffer pixelBuffer = inputFrame.lockBytes(Frame.MODE_READ);
calcMeanAndStd(pixelBuffer, inputFrame.getWidth(), inputFrame.getHeight(), mCropRect);
inputFrame.unlock();
OutputPort outPort = getConnectedOutputPort("mean");
FrameValue outFrame = outPort.fetchAvailableFrame(null).asFrameValue();
outFrame.setValue(mStats[MEAN_INDEX]);
outPort.pushFrame(outFrame);
OutputPort outPortStdev = getConnectedOutputPort("stdev");
FrameValue outFrameStdev = outPortStdev.fetchAvailableFrame(null).asFrameValue();
outFrameStdev.setValue(mStats[STDEV_INDEX]);
outPortStdev.pushFrame(outFrameStdev);
}
private native void regionscore(ByteBuffer imageBuffer, int width, int height, float left,
float top, float right, float bottom, float[] statsArray);
static {
System.loadLibrary("smartcamera_jni");
}
}

View File

@ -0,0 +1,197 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.image;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import androidx.media.filterfw.FrameImage2D;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.ImageShader;
import androidx.media.filterfw.InputPort;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.RenderTarget;
import androidx.media.filterfw.Signature;
import androidx.media.filterfw.ViewFilter;
public class SurfaceHolderTarget extends ViewFilter {
private SurfaceHolder mSurfaceHolder = null;
private RenderTarget mRenderTarget = null;
private ImageShader mShader = null;
private boolean mHasSurface = false;
private SurfaceHolder.Callback mSurfaceHolderListener = new SurfaceHolder.Callback() {
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// This just makes sure the holder is still the one we expect.
onSurfaceCreated(holder);
}
@Override
public void surfaceCreated (SurfaceHolder holder) {
onSurfaceCreated(holder);
}
@Override
public void surfaceDestroyed (SurfaceHolder holder) {
onDestroySurface();
}
};
public SurfaceHolderTarget(MffContext context, String name) {
super(context, name);
}
@Override
public void onBindToView(View view) {
if (view instanceof SurfaceView) {
SurfaceHolder holder = ((SurfaceView)view).getHolder();
if (holder == null) {
throw new RuntimeException("Could not get SurfaceHolder from SurfaceView "
+ view + "!");
}
setSurfaceHolder(holder);
} else {
throw new IllegalArgumentException("View must be a SurfaceView!");
}
}
public void setSurfaceHolder(SurfaceHolder holder) {
if (isRunning()) {
throw new IllegalStateException("Cannot set SurfaceHolder while running!");
}
mSurfaceHolder = holder;
}
public synchronized void onDestroySurface() {
if (mRenderTarget != null) {
mRenderTarget.release();
mRenderTarget = null;
}
mHasSurface = false;
}
@Override
public Signature getSignature() {
FrameType imageType = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
return super.getSignature()
.addInputPort("image", Signature.PORT_REQUIRED, imageType)
.disallowOtherPorts();
}
@Override
protected void onInputPortOpen(InputPort port) {
super.connectViewInputs(port);
}
@Override
protected synchronized void onPrepare() {
if (isOpenGLSupported()) {
mShader = ImageShader.createIdentity();
}
}
@Override
protected synchronized void onOpen() {
mSurfaceHolder.addCallback(mSurfaceHolderListener);
Surface surface = mSurfaceHolder.getSurface();
mHasSurface = (surface != null) && surface.isValid();
}
@Override
protected synchronized void onProcess() {
FrameImage2D image = getConnectedInputPort("image").pullFrame().asFrameImage2D();
if (mHasSurface) {
// Synchronize the surface holder in case another filter is accessing this surface.
synchronized (mSurfaceHolder) {
if (isOpenGLSupported()) {
renderGL(image);
} else {
renderCanvas(image);
}
}
}
}
/**
* Renders the given frame to the screen using GLES2.
* @param image the image to render
*/
private void renderGL(FrameImage2D image) {
if (mRenderTarget == null) {
mRenderTarget = RenderTarget.currentTarget().forSurfaceHolder(mSurfaceHolder);
mRenderTarget.registerAsDisplaySurface();
}
Rect frameRect = new Rect(0, 0, image.getWidth(), image.getHeight());
Rect surfRect = mSurfaceHolder.getSurfaceFrame();
setupShader(mShader, frameRect, surfRect);
mShader.process(image.lockTextureSource(),
mRenderTarget,
surfRect.width(),
surfRect.height());
image.unlock();
mRenderTarget.swapBuffers();
}
/**
* Renders the given frame to the screen using a Canvas.
* @param image the image to render
*/
private void renderCanvas(FrameImage2D image) {
Canvas canvas = mSurfaceHolder.lockCanvas();
Bitmap bitmap = image.toBitmap();
Rect sourceRect = new Rect(0, 0, bitmap.getWidth(), bitmap.getHeight());
Rect surfaceRect = mSurfaceHolder.getSurfaceFrame();
RectF targetRect = getTargetRect(sourceRect, surfaceRect);
canvas.drawColor(Color.BLACK);
if (targetRect.width() > 0 && targetRect.height() > 0) {
canvas.scale(surfaceRect.width(), surfaceRect.height());
canvas.drawBitmap(bitmap, sourceRect, targetRect, new Paint());
}
mSurfaceHolder.unlockCanvasAndPost(canvas);
}
@Override
protected synchronized void onClose() {
if (mRenderTarget != null) {
mRenderTarget.unregisterAsDisplaySurface();
mRenderTarget.release();
mRenderTarget = null;
}
if (mSurfaceHolder != null) {
mSurfaceHolder.removeCallback(mSurfaceHolderListener);
}
}
private synchronized void onSurfaceCreated(SurfaceHolder holder) {
if (mSurfaceHolder != holder) {
throw new RuntimeException("Unexpected Holder!");
}
mHasSurface = true;
}
}
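
A minimal usage sketch, not part of this change: the target is normally bound by the framework's view-binding path, but it can also be wired up by hand with the constructor and setSurfaceHolder(...) shown above. The view id, MffContext variable, and filter name below are hypothetical placeholders.

    // Hypothetical manual wiring from a host Activity; R.id.camera_preview and
    // mMffContext are assumed to exist in the host app.
    SurfaceView preview = (SurfaceView) findViewById(R.id.camera_preview);
    SurfaceHolderTarget display = new SurfaceHolderTarget(mMffContext, "display");
    display.setSurfaceHolder(preview.getHolder()); // must happen before the filter runs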

View File

@ -0,0 +1,66 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.text;
import android.view.View;
import android.widget.TextView;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.FrameValue;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.Signature;
import androidx.media.filterfw.ViewFilter;
public class TextViewTarget extends ViewFilter {
private TextView mTextView = null;
public TextViewTarget(MffContext context, String name) {
super(context, name);
}
@Override
public void onBindToView(View view) {
if (view instanceof TextView) {
mTextView = (TextView)view;
} else {
throw new IllegalArgumentException("View must be a TextView!");
}
}
@Override
public Signature getSignature() {
return new Signature()
.addInputPort("text", Signature.PORT_REQUIRED, FrameType.single(String.class))
.disallowOtherPorts();
}
@Override
protected void onProcess() {
FrameValue textFrame = getConnectedInputPort("text").pullFrame().asFrameValue();
final String text = (String)textFrame.getValue();
if (mTextView != null) {
mTextView.post(new Runnable() {
@Override
public void run() {
mTextView.setText(text);
}
});
}
}
}
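
A hedged usage sketch: onBindToView(...) is public and is presumably what the framework invokes when a view is bound; here it is called directly for illustration. The view id and context variable are assumptions.

    // Hypothetical binding; R.id.status_text and mMffContext are placeholders.
    TextViewTarget status = new TextViewTarget(mMffContext, "statusText");
    status.onBindToView((TextView) findViewById(R.id.status_text));
    // Any String frame pushed into the "text" port is then posted to the
    // TextView on the UI thread by onProcess().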

View File

@ -0,0 +1,121 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterfw;
import android.graphics.Bitmap;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
public class TextureSource {
private int mTexId;
private int mTarget;
private boolean mIsOwner;
private boolean mIsAllocated = false;
public static TextureSource fromTexture(int texId, int target) {
return new TextureSource(texId, target, false);
}
public static TextureSource fromTexture(int texId) {
return new TextureSource(texId, GLES20.GL_TEXTURE_2D, false);
}
public static TextureSource newTexture() {
return new TextureSource(GLToolbox.generateTexture(), GLES20.GL_TEXTURE_2D, true);
}
public static TextureSource newExternalTexture() {
return new TextureSource(GLToolbox.generateTexture(),
GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
true);
}
public int getTextureId() {
return mTexId;
}
public int getTarget() {
return mTarget;
}
public void bind() {
GLES20.glBindTexture(mTarget, mTexId);
GLToolbox.checkGlError("glBindTexture");
}
public void allocate(int width, int height) {
//Log.i("TextureSource", "Allocating empty texture " + mTexId + ": " + width + "x" + height + ".");
GLToolbox.allocateTexturePixels(mTexId, mTarget, width, height);
mIsAllocated = true;
}
public void allocateWithPixels(ByteBuffer pixels, int width, int height) {
//Log.i("TextureSource", "Uploading pixels to texture " + mTexId + ": " + width + "x" + height + ".");
GLToolbox.setTexturePixels(mTexId, mTarget, pixels, width, height);
mIsAllocated = true;
}
public void allocateWithBitmapPixels(Bitmap bitmap) {
//Log.i("TextureSource", "Uploading pixels to texture " + mTexId + "!");
GLToolbox.setTexturePixels(mTexId, mTarget, bitmap);
mIsAllocated = true;
}
public void generateMipmaps() {
GLES20.glBindTexture(mTarget, mTexId);
GLES20.glTexParameteri(mTarget,
GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_LINEAR_MIPMAP_LINEAR);
GLES20.glGenerateMipmap(mTarget);
GLES20.glBindTexture(mTarget, 0);
}
public void setParameter(int parameter, int value) {
GLES20.glBindTexture(mTarget, mTexId);
GLES20.glTexParameteri(mTarget, parameter, value);
GLES20.glBindTexture(mTarget, 0);
}
/**
* @hide
*/
public void release() {
if (GLToolbox.isTexture(mTexId) && mIsOwner) {
GLToolbox.deleteTexture(mTexId);
}
mTexId = GLToolbox.textureNone();
}
@Override
public String toString() {
return "TextureSource(id=" + mTexId + ", target=" + mTarget + ")";
}
boolean isAllocated() {
return mIsAllocated;
}
private TextureSource(int texId, int target, boolean isOwner) {
mTexId = texId;
mTarget = target;
mIsOwner = isOwner;
}
}
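
A short sketch of the lifecycle above, assuming a current GL context is bound on the calling thread; the dimensions and filter parameter are illustrative only.

    // Create an owned 2D texture, back it with empty pixel storage, and tune filtering.
    TextureSource texture = TextureSource.newTexture();
    texture.allocate(640, 480);                                   // empty pixel storage
    texture.setParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    texture.bind();                                               // ready for draw calls
    // ... render to or sample from the texture ...
    texture.release();  // deletes the GL texture because this instance owns it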

View File

@ -0,0 +1,52 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.performance;
public class Throughput {
private final int mTotalFrames;
private final int mPeriodFrames;
private final long mPeriodTime;
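    // Note: the 'size' argument below is accepted but not currently stored or used.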
public Throughput(int totalFrames, int periodFrames, long periodTime, int size) {
mTotalFrames = totalFrames;
mPeriodFrames = periodFrames;
mPeriodTime = periodTime;
}
public int getTotalFrameCount() {
return mTotalFrames;
}
public int getPeriodFrameCount() {
return mPeriodFrames;
}
public long getPeriodTime() {
return mPeriodTime;
}
public float getFramesPerSecond() {
return mPeriodFrames / (mPeriodTime / 1000.0f);
}
@Override
public String toString() {
return Math.round(getFramesPerSecond()) + " FPS";
}
}
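
A quick worked example of the accessors above; the numbers are arbitrary.

    // 30 frames over a 1000 ms window, out of 90 total frames:
    Throughput t = new Throughput(90, 30, 1000, 0);
    t.getFramesPerSecond(); // 30 / (1000 / 1000.0f) = 30.0f
    t.toString();           // "30 FPS"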

View File

@ -0,0 +1,95 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media.filterpacks.performance;
import android.os.SystemClock;
import android.util.Log;
import androidx.media.filterfw.*;
public class ThroughputFilter extends Filter {
private int mPeriod = 3;
private long mLastTime = 0;
private int mTotalFrameCount = 0;
private int mPeriodFrameCount = 0;
public ThroughputFilter(MffContext context, String name) {
super(context, name);
}
@Override
public Signature getSignature() {
FrameType throughputType = FrameType.single(Throughput.class);
return new Signature()
.addInputPort("frame", Signature.PORT_REQUIRED, FrameType.any())
.addOutputPort("throughput", Signature.PORT_REQUIRED, throughputType)
.addOutputPort("frame", Signature.PORT_REQUIRED, FrameType.any())
.addInputPort("period", Signature.PORT_OPTIONAL, FrameType.single(int.class))
.disallowOtherPorts();
}
@Override
public void onInputPortOpen(InputPort port) {
if (port.getName().equals("period")) {
port.bindToFieldNamed("mPeriod");
} else {
port.attachToOutputPort(getConnectedOutputPort("frame"));
}
}
@Override
protected void onOpen() {
mTotalFrameCount = 0;
mPeriodFrameCount = 0;
mLastTime = 0;
}
@Override
protected synchronized void onProcess() {
Frame inputFrame = getConnectedInputPort("frame").pullFrame();
// Update stats
++mTotalFrameCount;
++mPeriodFrameCount;
// Check clock
if (mLastTime == 0) {
mLastTime = SystemClock.elapsedRealtime();
}
long curTime = SystemClock.elapsedRealtime();
// Output throughput info if time period is up
if ((curTime - mLastTime) >= (mPeriod * 1000)) {
Log.i("Thru", "It is time!");
OutputPort tpPort = getConnectedOutputPort("throughput");
Throughput throughput = new Throughput(mTotalFrameCount,
mPeriodFrameCount,
curTime - mLastTime,
inputFrame.getElementCount());
FrameValue throughputFrame = tpPort.fetchAvailableFrame(null).asFrameValue();
throughputFrame.setValue(throughput);
tpPort.pushFrame(throughputFrame);
mLastTime = curTime;
mPeriodFrameCount = 0;
}
getConnectedOutputPort("frame").pushFrame(inputFrame);
}
}
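
A hedged trace of one reporting cycle using the default three-second period: the counters grow on every frame, and once at least mPeriod * 1000 ms have elapsed since the last report, a single Throughput frame is pushed and the period counters reset. The concrete numbers below are illustrative.

    // Suppose 95 frames arrived and 3170 ms have passed since the last report:
    long elapsed = 3170;                       // curTime - mLastTime
    boolean emit = elapsed >= 3 * 1000;        // true -> push one Throughput frame
    Throughput report = new Throughput(500, 95, elapsed, 0);
    // report.getFramesPerSecond() ≈ 29.97, report.toString() -> "30 FPS"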

Some files were not shown because too many files have changed in this diff.