Merge "Added Visualizer effect." into gingerbread

This commit is contained in:
Eric Laurent
2010-07-07 16:23:30 -07:00
committed by Android (Google) Code Review
22 changed files with 2495 additions and 619 deletions

View File

@ -307,29 +307,18 @@ public:
int32_t priority() const { return mPriority; }
/* Enables the effect engine.
/* Enables or disables the effect engine.
*
* Parameters:
* None.
* enabled: requested enable state.
*
* Returned status (from utils/Errors.h) can be:
* - NO_ERROR: successful operation
* - INVALID_OPERATION: the application does not have control of the effect engine
* - INVALID_OPERATION: the application does not have control of the effect engine or the
* effect is already in the requested state.
*/
status_t enable();
/* Disables the effect engine.
*
* Parameters:
* None.
*
* Returned status (from utils/Errors.h) can be:
* - NO_ERROR: successful operation
* - INVALID_OPERATION: the application does not have control of the effect engine
*/
status_t disable();
bool isEnabled() const;
virtual status_t setEnabled(bool enabled);
bool getEnabled() const;
/* Sets a parameter value.
*
@ -342,7 +331,7 @@ public:
* - BAD_VALUE: invalid parameter identifier or value.
* - DEAD_OBJECT: the effect engine has been deleted.
*/
status_t setParameter(effect_param_t *param);
virtual status_t setParameter(effect_param_t *param);
/* Prepare a new parameter value that will be set by next call to
* setParameterCommit(). This method can be used to set multiple parameters
@ -359,7 +348,7 @@ public:
* - NO_MEMORY: no more space available in shared memory used for deferred parameter
* setting.
*/
status_t setParameterDeferred(effect_param_t *param);
virtual status_t setParameterDeferred(effect_param_t *param);
/* Commit all parameter values previously prepared by setParameterDeferred().
*
@ -373,7 +362,7 @@ public:
* as to which of the parameters caused this error.
* - DEAD_OBJECT: the effect engine has been deleted.
*/
status_t setParameterCommit();
virtual status_t setParameterCommit();
/* Gets a parameter value.
*
@ -387,13 +376,17 @@ public:
* - BAD_VALUE: invalid parameter identifier.
* - DEAD_OBJECT: the effect engine has been deleted.
*/
status_t getParameter(effect_param_t *param);
virtual status_t getParameter(effect_param_t *param);
/* Sends a command and receives a response to/from effect engine.
* See EffectApi.h for details on effect command() function, valid command codes
* and formats.
*/
status_t command(int32_t cmdCode, int32_t cmdSize, void *cmdData, int32_t *replySize, void *replyData);
virtual status_t command(int32_t cmdCode,
int32_t cmdSize,
void *cmdData,
int32_t *replySize,
void *replyData);
/*
@ -409,6 +402,17 @@ public:
*/
static status_t guidToString(const effect_uuid_t *guid, char *str, size_t maxLen);
protected:
volatile int32_t mEnabled; // enable state
int32_t mSessionId; // audio session ID
int32_t mPriority; // priority for effect control
status_t mStatus; // effect status
effect_callback_t mCbf; // callback function for status, control and
// parameter changes notifications
void* mUserData; // client context for callback function
effect_descriptor_t mDescriptor; // effect descriptor
int32_t mId; // system wide unique effect engine instance ID
private:
// Implements the IEffectClient interface
@ -419,9 +423,17 @@ private:
EffectClient(AudioEffect *effect) : mEffect(effect){}
// IEffectClient
virtual void controlStatusChanged(bool controlGranted) {mEffect->controlStatusChanged(controlGranted);}
virtual void enableStatusChanged(bool enabled) {mEffect->enableStatusChanged(enabled);}
virtual void commandExecuted(int cmdCode, int cmdSize, void *pCmdData, int replySize, void *pReplyData) {
virtual void controlStatusChanged(bool controlGranted) {
mEffect->controlStatusChanged(controlGranted);
}
virtual void enableStatusChanged(bool enabled) {
mEffect->enableStatusChanged(enabled);
}
virtual void commandExecuted(int cmdCode,
int cmdSize,
void *pCmdData,
int replySize,
void *pReplyData) {
mEffect->commandExecuted(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
}
@ -446,14 +458,6 @@ private:
sp<EffectClient> mIEffectClient; // IEffectClient implementation
sp<IMemory> mCblkMemory; // shared memory for deferred parameter setting
effect_param_cblk_t* mCblk; // control block for deferred parameter setting
int32_t mPriority; // priority for effect control
status_t mStatus; // effect status
volatile int32_t mEnabled; // enable state
effect_callback_t mCbf; // callback function for status, control, parameter changes notifications
void* mUserData; // client context for callback function
effect_descriptor_t mDescriptor; // effect descriptor
int32_t mId; // system wide unique effect engine instance identifier
int32_t mSessionId; // audio session ID
};

View File

@ -0,0 +1,55 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_EFFECTVISUALIZERAPI_H_
#define ANDROID_EFFECTVISUALIZERAPI_H_

#include <media/EffectApi.h>

#if __cplusplus
extern "C" {
#endif

//TODO replace by openSL ES include when available
// Type UUID identifying the visualization effect (OpenSL ES SL_IID_VISUALIZATION).
static const effect_uuid_t SL_IID_VISUALIZATION_ =
    { 0xe46b26a0, 0xdddd, 0x11db, 0x8afd, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
// NOTE(review): this is a definition (not a declaration) in a header; when included from
// several C translation units the pointer has external linkage and may cause duplicate
// symbols -- confirm intended usage.
const effect_uuid_t * const SL_IID_VISUALIZATION = &SL_IID_VISUALIZATION_;

#define VISUALIZER_CAPTURE_SIZE_MAX 1024 // maximum capture size in samples
#define VISUALIZER_CAPTURE_SIZE_MIN 128 // minimum capture size in samples

/* enumerated parameters for Visualizer effect */
typedef enum
{
    VISU_PARAM_CAPTURE_SIZE, // Sets the number of PCM samples in the capture.
} t_visualizer_params;

/* commands */
typedef enum
{
    VISU_CMD_CAPTURE = EFFECT_CMD_FIRST_PROPRIETARY, // Gets the latest PCM capture.
}t_visualizer_cmds;

// VISU_CMD_CAPTURE retrieves the latest PCM snapshot captured by the visualizer engine.
// It returns the number of samples specified by VISU_PARAM_CAPTURE_SIZE
// in 8 bit unsigned format (a sample value of 0 is represented as 0x80)

#if __cplusplus
} // extern "C"
#endif

#endif /*ANDROID_EFFECTVISUALIZERAPI_H_*/

View File

@ -48,10 +48,6 @@ public:
virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0;
virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0;
virtual sp<IOMX> getOMX() = 0;
// Take a peek at currently playing audio, for visualization purposes.
// This returns a buffer of 16 bit mono PCM data, or NULL if no visualization buffer is currently available.
virtual sp<IMemory> snoop() = 0;
};
// ----------------------------------------------------------------------------

160
include/media/Visualizer.h Normal file
View File

@ -0,0 +1,160 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_MEDIA_VISUALIZER_H
#define ANDROID_MEDIA_VISUALIZER_H
#include <media/AudioEffect.h>
#include <media/EffectVisualizerApi.h>
#include <string.h>
/**
* The Visualizer class enables application to retrieve part of the currently playing audio for
* visualization purpose. It is not an audio recording interface and only returns partial and low
* quality audio content. However, to protect privacy of certain audio data (e.g voice mail) the use
* of the visualizer requires the permission android.permission.RECORD_AUDIO.
* The audio session ID passed to the constructor indicates which audio content should be
* visualized:
* - If the session is 0, the audio output mix is visualized
* - If the session is not 0, the audio from a particular MediaPlayer or AudioTrack
* using this audio session is visualized
* Two types of representation of audio content can be captured:
* - Waveform data: consecutive 8-bit (unsigned) mono samples by using the getWaveForm() method
* - Frequency data: 8-bit magnitude FFT by using the getFft() method
*
* The length of the capture can be retrieved or specified by calling respectively
* getCaptureSize() and setCaptureSize() methods. Note that the size of the FFT
* is half of the specified capture size but both sides of the spectrum are returned yielding in a
* number of bytes equal to the capture size. The capture size must be a power of 2 in the range
* returned by getMinCaptureSize() and getMaxCaptureSize().
* In addition to the polling capture mode, a callback mode is also available by installing a
* callback function by use of the setCaptureCallBack() method. The rate at which the callback
* is called as well as the type of data returned is specified.
* Before capturing data, the Visualizer must be enabled by calling the setEnabled() method.
* When data capture is not needed any more, the Visualizer should be disabled.
*/
namespace android {
// ----------------------------------------------------------------------------
class Visualizer: public AudioEffect {
public:

    /* Bit flags passed to setCaptureCallBack() selecting the capture format(s)
     * returned to the callback and whether the callback thread may call Java. */
    enum callback_flags {
        CAPTURE_WAVEFORM = 0x00000001,  // capture callback returns a PCM wave form
        CAPTURE_FFT = 0x00000002,       // capture callback returns a frequency representation
        CAPTURE_CALL_JAVA = 0x00000004  // the callback thread can call java
    };

    /* Constructor.
     * See AudioEffect constructor for details on parameters.
     */
    Visualizer(int32_t priority = 0,
               effect_callback_t cbf = 0,
               void* user = 0,
               int sessionId = 0);

    ~Visualizer();

    // Enables or disables the visualizer engine (overrides AudioEffect::setEnabled()).
    virtual status_t setEnabled(bool enabled);

    // maximum capture size in samples
    static uint32_t getMaxCaptureSize() { return VISUALIZER_CAPTURE_SIZE_MAX; }
    // minimum capture size in samples
    static uint32_t getMinCaptureSize() { return VISUALIZER_CAPTURE_SIZE_MIN; }
    // maximum capture rate in millihertz
    static uint32_t getMaxCaptureRate() { return CAPTURE_RATE_MAX; }

    // callback used to return periodic PCM or FFT captures to the application. Either one or both
    // types of data are returned (PCM and FFT) according to flags indicated when installing the
    // callback. When a type of data is not present, the corresponding size (waveformSize or
    // fftSize) is 0.
    typedef void (*capture_cbk_t)(void* user,
                                  uint32_t waveformSize,
                                  uint8_t *waveform,
                                  uint32_t fftSize,
                                  uint8_t *fft,
                                  uint32_t samplingrate);

    // install a callback to receive periodic captures. The capture rate is specified in milliHertz
    // and the capture format is according to flags (see callback_flags).
    status_t setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t flags, uint32_t rate);

    // set the capture size. The capture size must be a power of two in the range
    // [VISUALIZER_CAPTURE_SIZE_MIN, VISUALIZER_CAPTURE_SIZE_MAX].
    // must be called when the visualizer is not enabled
    status_t setCaptureSize(uint32_t size);
    // returns the current capture size in samples
    uint32_t getCaptureSize() { return mCaptureSize; }

    // returns the capture rate indicated when installing the callback
    uint32_t getCaptureRate() { return mCaptureRate; }

    // returns the sampling rate of the audio being captured
    uint32_t getSamplingRate() { return mSampleRate; }

    // return a capture in PCM 8 bit unsigned format. The size of the capture is equal to
    // getCaptureSize()
    status_t getWaveForm(uint8_t *waveform);

    // return a capture in FFT 8 bit signed format. The size of the capture is equal to
    // getCaptureSize() but the length of the FFT is half of the size (both parts of the spectrum
    // are returned)
    status_t getFft(uint8_t *fft);

private:

    static const uint32_t CAPTURE_RATE_MAX = 20000;   // maximum callback rate, in milliHertz
    static const uint32_t CAPTURE_RATE_DEF = 10000;   // default callback rate, in milliHertz
    static const uint32_t CAPTURE_SIZE_DEF = VISUALIZER_CAPTURE_SIZE_MAX;  // default capture size

    /* internal class to handle the callback */
    class CaptureThread : public Thread
    {
    public:
        CaptureThread(Visualizer& receiver, uint32_t captureRate, bool bCanCallJava = false);

    private:
        friend class Visualizer;
        virtual bool threadLoop();
        virtual status_t readyToRun();
        virtual void onFirstRef();
        Visualizer& mReceiver;    // Visualizer instance notified by this thread
        Mutex mLock;
        uint32_t mSleepTimeUs;    // time between captures -- presumably derived from
                                  // captureRate; confirm in Visualizer.cpp
    };

    status_t doFft(uint8_t *fft, uint8_t *waveform);  // computes fft from a waveform capture
                                                      // (implementation in Visualizer.cpp)
    void periodicCapture();                           // body of one periodic capture iteration
    uint32_t initCaptureSize();                       // initializes mCaptureSize -- confirm
                                                      // source of initial value in Visualizer.cpp

    Mutex mLock;                        // protects the state below
    uint32_t mCaptureRate;              // callback rate in milliHertz (see setCaptureCallBack())
    uint32_t mCaptureSize;              // number of samples per capture
    uint32_t mSampleRate;               // sampling rate of the audio being captured
    capture_cbk_t mCaptureCallBack;     // callback installed by setCaptureCallBack()
    void *mCaptureCbkUser;              // client context passed back to the callback
    sp<CaptureThread> mCaptureThread;   // thread driving periodic captures
    uint32_t mCaptureFlags;             // capture format flags (see callback_flags)
    void *mFftTable;                    // opaque FFT working state -- TODO confirm contents
};
}; // namespace android
#endif // ANDROID_MEDIA_VISUALIZER_H

View File

@ -166,7 +166,6 @@ public:
void notify(int msg, int ext1, int ext2);
static sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
static sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
static int snoop(short *data, int len, int kind);
status_t invoke(const Parcel& request, Parcel *reply);
status_t setMetadataFilter(const Parcel& filter);
status_t getMetadata(bool update_only, bool apply_filter, Parcel *metadata);

View File

@ -17,7 +17,8 @@
#define LOG_TAG "AudioFlinger"
//#define LOG_NDEBUG 0
//
#define LOG_NDEBUG 0
#include <math.h>
#include <signal.h>
@ -52,6 +53,7 @@
#endif
#include <media/EffectsFactoryApi.h>
#include <media/EffectVisualizerApi.h>
// ----------------------------------------------------------------------------
// the sim build doesn't have gettid
@ -4498,6 +4500,11 @@ status_t AudioFlinger::getEffectDescriptor(effect_uuid_t *pUuid, effect_descript
return EffectGetDescriptor(pUuid, descriptor);
}
// this UUID must match the one defined in media/libeffects/EffectVisualizer.cpp
static const effect_uuid_t VISUALIZATION_UUID_ =
{0xd069d9e0, 0x8329, 0x11df, 0x9168, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
sp<IEffect> AudioFlinger::createEffect(pid_t pid,
effect_descriptor_t *pDesc,
const sp<IEffectClient>& effectClient,
@ -4525,6 +4532,15 @@ sp<IEffect> AudioFlinger::createEffect(pid_t pid,
{
Mutex::Autolock _l(mLock);
// check recording permission for visualizer
if (memcmp(&pDesc->type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0 ||
memcmp(&pDesc->uuid, &VISUALIZATION_UUID_, sizeof(effect_uuid_t)) == 0) {
if (!recordingAllowed()) {
lStatus = PERMISSION_DENIED;
goto Exit;
}
}
if (!EffectIsNullUuid(&pDesc->uuid)) {
// if uuid is specified, request effect descriptor
lStatus = EffectGetDescriptor(&pDesc->uuid, &desc);
@ -5089,7 +5105,7 @@ void AudioFlinger::EffectModule::process()
if (mState != ACTIVE) {
switch (mState) {
case RESET:
reset();
reset_l();
mState = STARTING;
// clear auxiliary effect input buffer for next accumulation
if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
@ -5097,14 +5113,14 @@ void AudioFlinger::EffectModule::process()
}
return;
case STARTING:
start();
start_l();
mState = ACTIVE;
break;
case STOPPING:
mState = STOPPED;
break;
case STOPPED:
stop();
stop_l();
mState = IDLE;
return;
}
@ -5132,7 +5148,7 @@ void AudioFlinger::EffectModule::process()
}
}
void AudioFlinger::EffectModule::reset()
void AudioFlinger::EffectModule::reset_l()
{
if (mEffectInterface == NULL) {
return;
@ -5205,6 +5221,7 @@ status_t AudioFlinger::EffectModule::configure()
status_t AudioFlinger::EffectModule::init()
{
Mutex::Autolock _l(mLock);
if (mEffectInterface == NULL) {
return NO_INIT;
}
@ -5217,7 +5234,7 @@ status_t AudioFlinger::EffectModule::init()
return status;
}
status_t AudioFlinger::EffectModule::start()
status_t AudioFlinger::EffectModule::start_l()
{
if (mEffectInterface == NULL) {
return NO_INIT;
@ -5231,7 +5248,7 @@ status_t AudioFlinger::EffectModule::start()
return status;
}
status_t AudioFlinger::EffectModule::stop()
status_t AudioFlinger::EffectModule::stop_l()
{
if (mEffectInterface == NULL) {
return NO_INIT;
@ -5247,7 +5264,8 @@ status_t AudioFlinger::EffectModule::stop()
status_t AudioFlinger::EffectModule::command(int cmdCode, int cmdSize, void *pCmdData, int *replySize, void *pReplyData)
{
LOGV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface);
Mutex::Autolock _l(mLock);
// LOGV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface);
if (mEffectInterface == NULL) {
return NO_INIT;
@ -5255,7 +5273,6 @@ status_t AudioFlinger::EffectModule::command(int cmdCode, int cmdSize, void *pCm
status_t status = (*mEffectInterface)->command(mEffectInterface, cmdCode, cmdSize, pCmdData, replySize, pReplyData);
if (cmdCode != EFFECT_CMD_GET_PARAM && status == NO_ERROR) {
int size = (replySize == NULL) ? 0 : *replySize;
Mutex::Autolock _l(mLock);
for (size_t i = 1; i < mHandles.size(); i++) {
sp<EffectHandle> h = mHandles[i].promote();
if (h != 0) {
@ -5322,6 +5339,7 @@ bool AudioFlinger::EffectModule::isEnabled()
status_t AudioFlinger::EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller)
{
Mutex::Autolock _l(mLock);
status_t status = NO_ERROR;
// Send volume indication if EFFECT_FLAG_VOLUME_IND is set and read back altered volume
@ -5347,6 +5365,7 @@ status_t AudioFlinger::EffectModule::setVolume(uint32_t *left, uint32_t *right,
status_t AudioFlinger::EffectModule::setDevice(uint32_t device)
{
Mutex::Autolock _l(mLock);
status_t status = NO_ERROR;
if ((mDescriptor.flags & EFFECT_FLAG_DEVICE_MASK) == EFFECT_FLAG_DEVICE_IND) {
// convert device bit field from AudioSystem to EffectApi format.
@ -5366,6 +5385,7 @@ status_t AudioFlinger::EffectModule::setDevice(uint32_t device)
status_t AudioFlinger::EffectModule::setMode(uint32_t mode)
{
Mutex::Autolock _l(mLock);
status_t status = NO_ERROR;
if ((mDescriptor.flags & EFFECT_FLAG_AUDIO_MODE_MASK) == EFFECT_FLAG_AUDIO_MODE_IND) {
// convert audio mode from AudioSystem to EffectApi format.
@ -5586,7 +5606,7 @@ void AudioFlinger::EffectHandle::disconnect()
status_t AudioFlinger::EffectHandle::command(int cmdCode, int cmdSize, void *pCmdData, int *replySize, void *pReplyData)
{
LOGV("command(), cmdCode: %d, mHasControl: %d, mEffect: %p", cmdCode, mHasControl, (mEffect == 0) ? 0 : mEffect.get());
// LOGV("command(), cmdCode: %d, mHasControl: %d, mEffect: %p", cmdCode, mHasControl, (mEffect == 0) ? 0 : mEffect.get());
// only get parameter command is permitted for applications not controlling the effect
if (!mHasControl && cmdCode != EFFECT_CMD_GET_PARAM) {

View File

@ -916,7 +916,7 @@ private:
void process();
status_t command(int cmdCode, int cmdSize, void *pCmdData, int *replySize, void *pReplyData);
void reset();
void reset_l();
status_t configure();
status_t init();
uint32_t state() {
@ -951,8 +951,8 @@ private:
EffectModule(const EffectModule&);
EffectModule& operator = (const EffectModule&);
status_t start();
status_t stop();
status_t start_l();
status_t stop_l();
// update this table when AudioSystem::audio_devices or audio_device_e (in EffectApi.h) are modified
static const uint32_t sDeviceConvTable[];

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,510 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.media;
import android.util.Log;
import java.lang.ref.WeakReference;
import java.io.IOException;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
/**
* The Visualizer class enables application to retrieve part of the currently playing audio for
* visualization purpose. It is not an audio recording interface and only returns partial and low
* quality audio content. However, to protect privacy of certain audio data (e.g voice mail) the use
* of the visualizer requires the permission android.permission.RECORD_AUDIO.
* <p>The audio session ID passed to the constructor indicates which audio content should be
* visualized:<br>
* <ul>
* <li>If the session is 0, the audio output mix is visualized</li>
* <li>If the session is not 0, the audio from a particular {@link MediaPlayer} or
* {@link AudioTrack}
* using this audio session is visualized </li>
* </ul>
* <p>Two types of representation of audio content can be captured: <br>
* <ul>
* <li>Waveform data: consecutive 8-bit (unsigned) mono samples by using the
* {@link #getWaveForm(byte[])} method</li>
* <li>Frequency data: 8-bit magnitude FFT by using the {@link #getFft(byte[])} method</li>
* </ul>
* <p>The length of the capture can be retrieved or specified by calling respectively
* {@link #getCaptureSize()} and {@link #setCaptureSize(int)} methods. Note that the size of the FFT
* is half of the specified capture size but both sides of the spectrum are returned yielding in a
* number of bytes equal to the capture size. The capture size must be a power of 2 in the range
* returned by {@link #getCaptureSizeRange()}.
* <p>In addition to the polling capture mode described above with {@link #getWaveForm(byte[])} and
* {@link #getFft(byte[])} methods, a callback mode is also available by installing a listener by
* use of the {@link #setDataCaptureListener(OnDataCaptureListener, int, boolean, boolean)} method.
* The rate at which the listener capture method is called as well as the type of data returned is
* specified.
* <p>Before capturing data, the Visualizer must be enabled by calling the
* {@link #setEnabled(boolean)} method.
* When data capture is not needed any more, the Visualizer should be disabled.
* <p>It is good practice to call the {@link #release()} method when the Visualizer is not used
* anymore to free up native resources associated to the Visualizer instance.
*
* {@hide Pending API council review}
*/
public class Visualizer {
static {
    // Load the JNI library backing this class and run its one-time native initialization.
    System.loadLibrary("audioeffect_jni");
    native_init();
}
private final static String TAG = "Visualizer-JAVA";
/**
* State of a Visualizer object that was not successfully initialized upon creation
*/
public static final int STATE_UNINITIALIZED = 0;
/**
* State of a Visualizer object that is ready to be used.
*/
public static final int STATE_INITIALIZED = 1;
/**
* State of a Visualizer object that is active.
*/
public static final int STATE_ENABLED = 2;
// to keep in sync with frameworks/base/media/jni/audioeffect/android_media_Visualizer.cpp
protected static final int NATIVE_EVENT_PCM_CAPTURE = 0;
protected static final int NATIVE_EVENT_FFT_CAPTURE = 1;
// Error codes:
/**
* Successful operation.
*/
public static final int SUCCESS = 0;
/**
* Unspecified error.
*/
public static final int ERROR = -1;
/**
* Internal operation status. Not returned by any method.
*/
public static final int ALREADY_EXISTS = -2;
/**
* Operation failed due to bad object initialization.
*/
public static final int ERROR_NO_INIT = -3;
/**
* Operation failed due to bad parameter value.
*/
public static final int ERROR_BAD_VALUE = -4;
/**
* Operation failed because it was requested in wrong state.
*/
public static final int ERROR_INVALID_OPERATION = -5;
/**
* Operation failed due to lack of memory.
*/
public static final int ERROR_NO_MEMORY = -6;
/**
* Operation failed due to dead remote object.
*/
public static final int ERROR_DEAD_OBJECT = -7;
//--------------------------------------------------------------------------
// Member variables
//--------------------
/**
* Indicates the state of the Visualizer instance
*/
protected int mState = STATE_UNINITIALIZED;
/**
* Lock to synchronize access to mState
*/
protected final Object mStateLock = new Object();
/**
* System wide unique Identifier of the visualizer engine used by this Visualizer instance
*/
protected int mId;
/**
* Lock to protect listeners updates against event notifications
*/
protected final Object mListenerLock = new Object();
/**
* Handler for events coming from the native code
*/
protected NativeEventHandler mNativeEventHandler = null;
/**
* PCM and FFT capture listener registered by client
*/
protected OnDataCaptureListener mCaptureListener = null;
// accessed by native methods
private int mNativeVisualizer;
private int mJniData;
//--------------------------------------------------------------------------
// Constructor, Finalize
//--------------------
/**
* Class constructor.
* @param audioSession System wide unique audio session identifier. If audioSession
* is not 0, the visualizer will be attached to the MediaPlayer or AudioTrack in the
* same audio session. Otherwise, the Visualizer will apply to the output mix.
*
* @throws java.lang.UnsupportedOperationException
* @throws java.lang.RuntimeException
*/
public Visualizer(int audioSession)
throws UnsupportedOperationException, RuntimeException {
    int[] id = new int[1];

    synchronized (mStateLock) {
        mState = STATE_UNINITIALIZED;
        // native initialization: attaches to the given audio session and returns the
        // system wide unique engine id in id[0]
        int result = native_setup(new WeakReference<Visualizer>(this), audioSession, id);
        if (result != SUCCESS && result != ALREADY_EXISTS) {
            Log.e(TAG, "Error code "+result+" when initializing Visualizer.");
            switch (result) {
            case ERROR_INVALID_OPERATION:
                throw (new UnsupportedOperationException("Effect library not loaded"));
            default:
                throw (new RuntimeException("Cannot initialize Visualizer engine, error: "
                        +result));
            }
        }
        mId = id[0];
        // reflect the actual native enable state; ALREADY_EXISTS above means we may have
        // attached to an engine that is already running
        if (native_getEnabled()) {
            mState = STATE_ENABLED;
        } else {
            mState = STATE_INITIALIZED;
        }
    }
}
/**
* Releases the native Visualizer resources. It is a good practice to release the
* visualization engine when not in use.
*/
public void release() {
    synchronized (mStateLock) {
        // free native resources; the object must not be used after this except to be
        // garbage collected
        native_release();
        mState = STATE_UNINITIALIZED;
    }
}
@Override
protected void finalize() {
    // safety net: release native resources if release() was never called explicitly
    native_finalize();
}
/**
* Enable or disable the visualization engine.
* @param enabled requested enable state
* @return {@link #SUCCESS} in case of success,
* {@link #ERROR_INVALID_OPERATION} or {@link #ERROR_DEAD_OBJECT} in case of failure.
* @throws IllegalStateException
*/
/**
 * Enable or disable the visualization engine.
 * Only the transitions INITIALIZED -> ENABLED and ENABLED -> INITIALIZED are legal;
 * any other request throws.
 */
public int setEnabled(boolean enabled)
throws IllegalStateException {
    synchronized (mStateLock) {
        int requiredState = enabled ? STATE_INITIALIZED : STATE_ENABLED;
        if (mState != requiredState) {
            throw(new IllegalStateException("setEnabled() called in wrong state: "+mState));
        }
        int status = native_setEnabled(enabled);
        if (status == SUCCESS) {
            // commit the state change only when the native layer accepted it
            mState = enabled ? STATE_ENABLED : STATE_INITIALIZED;
        }
        return status;
    }
}
/**
* Get current activation state of the visualizer.
* @return true if the visualizer is active, false otherwise
*/
public boolean getEnabled()
{
    synchronized (mStateLock) {
        // querying an uninitialized engine is a programming error
        if (mState == STATE_UNINITIALIZED) {
            throw(new IllegalStateException("getEnabled() called in wrong state: "+mState));
        }
        return native_getEnabled();
    }
}
/**
* Returns the capture size range.
* @return the mininum capture size is returned in first array element and the maximum in second
* array element.
*/
public static native int[] getCaptureSizeRange();
/**
* Returns the maximum capture rate for the callback capture method. This is the maximum value
* for the rate parameter of the
* {@link #setDataCaptureListener(OnDataCaptureListener, int, boolean, boolean)} method.
* @return the maximum capture rate expressed in milliHertz
*/
public static native int getMaxCaptureRate();
/**
* Sets the capture size, i.e. the number of bytes returned by {@link #getWaveForm(byte[])} and
* {@link #getFft(byte[])} methods. The capture size must be a power of 2 in the range returned
* by {@link #getCaptureSizeRange()}.
* This method must not be called when the Visualizer is enabled.
* @param size requested capture size
* @return {@link #SUCCESS} in case of success,
* {@link #ERROR_BAD_VALUE} in case of failure.
* @throws IllegalStateException
*/
/**
 * Sets the capture size in bytes. Only allowed while the Visualizer is initialized
 * but not enabled.
 */
public int setCaptureSize(int size)
throws IllegalStateException {
    synchronized (mStateLock) {
        if (mState == STATE_INITIALIZED) {
            return native_setCaptureSize(size);
        }
        // ENABLED or UNINITIALIZED: the capture size cannot be changed
        throw(new IllegalStateException("setCaptureSize() called in wrong state: "+mState));
    }
}
/**
* Returns current capture size.
* @return the capture size in bytes.
*/
public int getCaptureSize()
throws IllegalStateException {
    synchronized (mStateLock) {
        // valid in INITIALIZED or ENABLED state only
        if (mState == STATE_UNINITIALIZED) {
            throw(new IllegalStateException("getCaptureSize() called in wrong state: "+mState));
        }
        return native_getCaptureSize();
    }
}
/**
* Returns the sampling rate of the captured audio.
* @return the sampling rate in milliHertz.
*/
public int getSamplingRate()
throws IllegalStateException {
    synchronized (mStateLock) {
        // valid in INITIALIZED or ENABLED state only
        if (mState == STATE_UNINITIALIZED) {
            throw(new IllegalStateException("getSamplingRate() called in wrong state: "+mState));
        }
        return native_getSamplingRate();
    }
}
/**
* Returns a waveform capture of currently playing audio content. The capture consists in
* a number of consecutive 8-bit (unsigned) mono PCM samples equal to the capture size returned
* by {@link #getCaptureSize()}.
* <p>This method must be called when the Visualizer is enabled.
* @param waveform array of bytes where the waveform should be returned
* @return {@link #SUCCESS} in case of success,
* {@link #ERROR_NO_MEMORY}, {@link #ERROR_INVALID_OPERATION} or {@link #ERROR_DEAD_OBJECT}
* in case of failure.
* @throws IllegalStateException
*/
public int getWaveForm(byte[] waveform)
throws IllegalStateException {
    synchronized (mStateLock) {
        // captures are only available while the visualizer is enabled
        if (mState != STATE_ENABLED) {
            throw(new IllegalStateException("getWaveForm() called in wrong state: "+mState));
        }
        return native_getWaveForm(waveform);
    }
}
/**
* Returns a frequency capture of currently playing audio content. The capture is a 8-bit
* magnitude FFT. Note that the size of the FFT is half of the specified capture size but both
* sides of the spectrum are returned yielding in a number of bytes equal to the capture size.
* {@see #getCaptureSize()}.
* <p>This method must be called when the Visualizer is enabled.
* @param fft array of bytes where the FFT should be returned
* @return {@link #SUCCESS} in case of success,
* {@link #ERROR_NO_MEMORY}, {@link #ERROR_INVALID_OPERATION} or {@link #ERROR_DEAD_OBJECT}
* in case of failure.
* @throws IllegalStateException
*/
public int getFft(byte[] fft)
throws IllegalStateException {
    synchronized (mStateLock) {
        // captures are only available while the visualizer is enabled
        if (mState != STATE_ENABLED) {
            throw(new IllegalStateException("getFft() called in wrong state: "+mState));
        }
        return native_getFft(fft);
    }
}
//---------------------------------------------------------
// Interface definitions
//--------------------
/**
* The OnDataCaptureListener interface defines methods called by the Visualizer to periodically
* update the audio visualization capture.
* The client application can implement this interface and register the listener with the
* {@link #setDataCaptureListener(OnDataCaptureListener, int, boolean, boolean)} method.
*/
public interface OnDataCaptureListener {
    // NOTE(review): these callbacks appear to be dispatched through NativeEventHandler on
    // the looper registered in setDataCaptureListener() -- confirm against the handler code.

    /**
     * Method called when a new waveform capture is available.
     * @param visualizer Visualizer object on which the listener is registered.
     * @param waveform array of bytes containing the waveform representation.
     * @param samplingRate sampling rate of the audio visualized.
     */
    void onWaveFormDataCapture(Visualizer visualizer, byte[] waveform, int samplingRate);

    /**
     * Method called when a new frequency capture is available.
     * @param visualizer Visualizer object on which the listener is registered.
     * @param fft array of bytes containing the frequency representation.
     * @param samplingRate sampling rate of the audio visualized.
     */
    void onFftDataCapture(Visualizer visualizer, byte[] fft, int samplingRate);
}
/**
 * Registers an OnDataCaptureListener interface and specifies the rate at which the capture
 * should be updated as well as the type of capture requested.
 * <p>Call this method with a null listener to stop receiving the capture updates.
 * @param listener OnDataCaptureListener registered
 * @param rate rate in milliHertz at which the capture should be updated
 * @param waveform true if a waveform capture is requested: the onWaveFormDataCapture()
 * method will be called on the OnDataCaptureListener interface.
 * @param fft true if a frequency capture is requested: the onFftDataCapture() method will be
 * called on the OnDataCaptureListener interface.
 * @return {@link #SUCCESS} in case of success,
 * {@link #ERROR_NO_INIT} or {@link #ERROR_BAD_VALUE} in case of failure.
 */
public int setDataCaptureListener(OnDataCaptureListener listener,
        int rate, boolean waveform, boolean fft) {
    synchronized (mListenerLock) {
        mCaptureListener = listener;
    }
    // With no listener, force both capture types off so the native callback stops.
    boolean captureWaveform = (listener == null) ? false : waveform;
    boolean captureFft = (listener == null) ? false : fft;
    int status = native_setPeriodicCapture(rate, captureWaveform, captureFft);
    if (status == SUCCESS && listener != null && mNativeEventHandler == null) {
        // Deliver events on the caller's looper if it has one, else the main looper.
        Looper looper = Looper.myLooper();
        if (looper == null) {
            looper = Looper.getMainLooper();
        }
        if (looper != null) {
            mNativeEventHandler = new NativeEventHandler(this, looper);
        } else {
            mNativeEventHandler = null;
            status = ERROR_NO_INIT;
        }
    }
    return status;
}
/**
 * Helper class to handle the forwarding of native events to the appropriate listeners.
 * Runs on the looper chosen in setDataCaptureListener().
 */
private class NativeEventHandler extends Handler
{
    private Visualizer mVisualizer;

    public NativeEventHandler(Visualizer v, Looper looper) {
        super(looper);
        mVisualizer = v;
    }

    @Override
    public void handleMessage(Message msg) {
        if (mVisualizer == null) {
            return;
        }
        // Snapshot the listener under the lock; it may be replaced concurrently.
        OnDataCaptureListener listener;
        synchronized (mListenerLock) {
            listener = mVisualizer.mCaptureListener;
        }
        if (listener == null) {
            return;
        }
        byte[] data = (byte[]) msg.obj;
        int samplingRate = msg.arg1;
        if (msg.what == NATIVE_EVENT_PCM_CAPTURE) {
            listener.onWaveFormDataCapture(mVisualizer, data, samplingRate);
        } else if (msg.what == NATIVE_EVENT_FFT_CAPTURE) {
            listener.onFftDataCapture(mVisualizer, data, samplingRate);
        } else {
            Log.e(TAG,"Unknown native event: "+msg.what);
        }
    }
}
//---------------------------------------------------------
// Native methods
//--------------------
private static native final void native_init();
private native final int native_setup(Object audioeffect_this,
int audioSession,
int[] id);
private native final void native_finalize();
private native final void native_release();
private native final int native_setEnabled(boolean enabled);
private native final boolean native_getEnabled();
private native final int native_setCaptureSize(int size);
private native final int native_getCaptureSize();
private native final int native_getSamplingRate();
private native final int native_getWaveForm(byte[] waveform);
private native final int native_getFft(byte[] fft);
private native final int native_setPeriodicCapture(int rate, boolean waveForm, boolean fft);
//---------------------------------------------------------
// Java methods called from the native side
//--------------------
@SuppressWarnings("unused")
private static void postEventFromNative(Object effect_ref,
        int what, int arg1, int arg2, Object obj) {
    // The native side holds only a weak reference: the Java Visualizer may
    // already have been garbage collected when an event arrives.
    Visualizer visualizer = (Visualizer) ((WeakReference) effect_ref).get();
    if (visualizer == null || visualizer.mNativeEventHandler == null) {
        return;
    }
    Message msg = visualizer.mNativeEventHandler.obtainMessage(what, arg1, arg2, obj);
    visualizer.mNativeEventHandler.sendMessage(msg);
}
}

View File

@ -680,18 +680,6 @@ android_media_MediaPlayer_native_finalize(JNIEnv *env, jobject thiz)
android_media_MediaPlayer_release(env, thiz);
}
static jint
android_media_MediaPlayer_snoop(JNIEnv* env, jobject thiz, jobject data, jint kind) {
jshort* ar = (jshort*)env->GetPrimitiveArrayCritical((jarray)data, 0);
jsize len = env->GetArrayLength((jarray)data);
int ret = 0;
if (ar) {
ret = MediaPlayer::snoop(ar, len, kind);
env->ReleasePrimitiveArrayCritical((jarray)data, ar, 0);
}
return ret;
}
static jint
android_media_MediaPlayer_native_suspend_resume(
JNIEnv *env, jobject thiz, jboolean isSuspend) {
@ -757,7 +745,6 @@ static JNINativeMethod gMethods[] = {
{"native_init", "()V", (void *)android_media_MediaPlayer_native_init},
{"native_setup", "(Ljava/lang/Object;)V", (void *)android_media_MediaPlayer_native_setup},
{"native_finalize", "()V", (void *)android_media_MediaPlayer_native_finalize},
{"snoop", "([SI)I", (void *)android_media_MediaPlayer_snoop},
{"native_suspend_resume", "(Z)I", (void *)android_media_MediaPlayer_native_suspend_resume},
{"getAudioSessionId", "()I", (void *)android_media_MediaPlayer_get_audio_session_id},
{"setAudioSessionId", "(I)V", (void *)android_media_MediaPlayer_set_audio_session_id},

View File

@ -2,7 +2,8 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
android_media_AudioEffect.cpp
android_media_AudioEffect.cpp \
android_media_Visualizer.cpp
LOCAL_SHARED_LIBRARIES := \
libcutils \

View File

@ -455,9 +455,8 @@ static void android_media_AudioEffect_native_release(JNIEnv *env, jobject thiz)
env->SetIntField(thiz, fields.fidJniData, 0);
}
static jint
android_media_AudioEffect_native_enable(JNIEnv *env, jobject thiz)
android_media_AudioEffect_native_setEnabled(JNIEnv *env, jobject thiz, jboolean enabled)
{
// retrieve the AudioEffect object
AudioEffect* lpAudioEffect = (AudioEffect *)env->GetIntField(
@ -469,29 +468,11 @@ android_media_AudioEffect_native_enable(JNIEnv *env, jobject thiz)
return AUDIOEFFECT_ERROR_NO_INIT;
}
return translateError(lpAudioEffect->enable());
return translateError(lpAudioEffect->setEnabled(enabled));
}
static jint
android_media_AudioEffect_native_disable(JNIEnv *env, jobject thiz)
{
// retrieve the AudioEffect object
AudioEffect* lpAudioEffect = (AudioEffect *)env->GetIntField(
thiz, fields.fidNativeAudioEffect);
if (lpAudioEffect == NULL) {
jniThrowException(env, "java/lang/IllegalStateException",
"Unable to retrieve AudioEffect pointer for disable()");
return AUDIOEFFECT_ERROR_NO_INIT;
}
return translateError(lpAudioEffect->disable());
}
static jboolean
android_media_AudioEffect_native_getEnable(JNIEnv *env, jobject thiz)
android_media_AudioEffect_native_getEnabled(JNIEnv *env, jobject thiz)
{
// retrieve the AudioEffect object
AudioEffect* lpAudioEffect = (AudioEffect *)env->GetIntField(
@ -503,7 +484,7 @@ android_media_AudioEffect_native_getEnable(JNIEnv *env, jobject thiz)
return false;
}
return (jboolean)lpAudioEffect->isEnabled();
return (jboolean)lpAudioEffect->getEnabled();
}
@ -516,7 +497,7 @@ android_media_AudioEffect_native_hasControl(JNIEnv *env, jobject thiz)
if (lpAudioEffect == NULL) {
jniThrowException(env, "java/lang/IllegalStateException",
"Unable to retrieve AudioEffect pointer for getEnabled()");
"Unable to retrieve AudioEffect pointer for hasControl()");
return false;
}
@ -817,9 +798,8 @@ static JNINativeMethod gMethods[] = {
(void *)android_media_AudioEffect_native_setup},
{"native_finalize", "()V", (void *)android_media_AudioEffect_native_finalize},
{"native_release", "()V", (void *)android_media_AudioEffect_native_release},
{"native_enable", "()I", (void *)android_media_AudioEffect_native_enable},
{"native_disable", "()I", (void *)android_media_AudioEffect_native_disable},
{"native_getEnable", "()Z", (void *)android_media_AudioEffect_native_getEnable},
{"native_setEnabled", "(Z)I", (void *)android_media_AudioEffect_native_setEnabled},
{"native_getEnabled", "()Z", (void *)android_media_AudioEffect_native_getEnabled},
{"native_hasControl", "()Z", (void *)android_media_AudioEffect_native_hasControl},
{"native_setParameter", "(I[BI[B)I", (void *)android_media_AudioEffect_native_setParameter},
{"native_getParameter", "(I[B[I[B)I", (void *)android_media_AudioEffect_native_getParameter},
@ -830,6 +810,8 @@ static JNINativeMethod gMethods[] = {
// ----------------------------------------------------------------------------
extern int register_android_media_visualizer(JNIEnv *env);
int register_android_media_AudioEffect(JNIEnv *env)
{
return AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods));
@ -852,6 +834,11 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
goto bail;
}
if (register_android_media_visualizer(env) < 0) {
LOGE("ERROR: Visualizer native registration failed\n");
goto bail;
}
/* success -- return valid version number */
result = JNI_VERSION_1_4;

View File

@ -0,0 +1,507 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <stdio.h>
//#define LOG_NDEBUG 0
#define LOG_TAG "visualizers-JNI"
#include <utils/Log.h>
#include <nativehelper/jni.h>
#include <nativehelper/JNIHelp.h>
#include <android_runtime/AndroidRuntime.h>
#include "media/Visualizer.h"
using namespace android;
#define VISUALIZER_SUCCESS 0
#define VISUALIZER_ERROR -1
#define VISUALIZER_ERROR_ALREADY_EXISTS -2
#define VISUALIZER_ERROR_NO_INIT -3
#define VISUALIZER_ERROR_BAD_VALUE -4
#define VISUALIZER_ERROR_INVALID_OPERATION -5
#define VISUALIZER_ERROR_NO_MEMORY -6
#define VISUALIZER_ERROR_DEAD_OBJECT -7
#define NATIVE_EVENT_PCM_CAPTURE 0
#define NATIVE_EVENT_FFT_CAPTURE 1
// ----------------------------------------------------------------------------
static const char* const kClassPathName = "android/media/Visualizer";
// Cached JNI identifiers, resolved once in android_media_visualizer_native_init().
struct fields_t {
    // these fields provide access from C++ to the Java Visualizer class members
    jclass clazzEffect; // Visualizer class
    jmethodID midPostNativeEvent; // event post callback method (static postEventFromNative)
    jfieldID fidNativeVisualizer; // stores in Java the native Visualizer object
    jfieldID fidJniData; // stores in Java additional resources used by the native Visualizer
};
static fields_t fields;
// Per-instance data handed to the native capture callback as its opaque cookie.
struct visualizer_callback_cookie {
    jclass visualizer_class; // Visualizer class (global ref, taken in native_setup)
    jobject visualizer_ref; // Visualizer object instance (global ref to the weak reference)
};
// ----------------------------------------------------------------------------
// Owner of the callback cookie; its address is stored in the Java object's
// mJniData field and reclaimed in native_finalize()/native_release().
class visualizerJniStorage {
public:
    visualizer_callback_cookie mCallbackData;
    visualizerJniStorage() {
    }
    ~visualizerJniStorage() {
    }
};
// Maps native status_t error codes onto the Java-visible VISUALIZER_* codes.
// Any unrecognized code collapses to the generic VISUALIZER_ERROR.
static jint translateError(int code) {
    if (code == NO_ERROR)          return VISUALIZER_SUCCESS;
    if (code == ALREADY_EXISTS)    return VISUALIZER_ERROR_ALREADY_EXISTS;
    if (code == NO_INIT)           return VISUALIZER_ERROR_NO_INIT;
    if (code == BAD_VALUE)         return VISUALIZER_ERROR_BAD_VALUE;
    if (code == INVALID_OPERATION) return VISUALIZER_ERROR_INVALID_OPERATION;
    if (code == NO_MEMORY)         return VISUALIZER_ERROR_NO_MEMORY;
    if (code == DEAD_OBJECT)       return VISUALIZER_ERROR_DEAD_OBJECT;
    return VISUALIZER_ERROR;
}
// ----------------------------------------------------------------------------
// Called by the native Visualizer when a new capture is available. Copies the
// native buffers into Java byte arrays and posts them to the Java Visualizer
// through the static postEventFromNative() method.
// Fixes vs original: the NULL check on user/env now happens BEFORE callbackInfo
// is dereferenced; the waveform branch now releases its local reference (only
// the FFT branch did); unused locals arg1/arg2/size removed.
static void captureCallback(void* user,
        uint32_t waveformSize,
        uint8_t *waveform,
        uint32_t fftSize,
        uint8_t *fft,
        uint32_t samplingrate) {

    visualizer_callback_cookie *callbackInfo = (visualizer_callback_cookie *)user;
    JNIEnv *env = AndroidRuntime::getJNIEnv();

    if (callbackInfo == NULL || env == NULL) {
        LOGW("captureCallback error user %p, env %p", user, env);
        return;
    }

    LOGV("captureCallback: callbackInfo %p, visualizer_ref %p visualizer_class %p",
            callbackInfo,
            callbackInfo->visualizer_ref,
            callbackInfo->visualizer_class);

    if (waveformSize != 0 && waveform != NULL) {
        jbyteArray jArray = env->NewByteArray(waveformSize);
        if (jArray != NULL) {
            jbyte *nArray = env->GetByteArrayElements(jArray, NULL);
            memcpy(nArray, waveform, waveformSize);
            env->ReleaseByteArrayElements(jArray, nArray, 0);
            env->CallStaticVoidMethod(
                callbackInfo->visualizer_class,
                fields.midPostNativeEvent,
                callbackInfo->visualizer_ref,
                NATIVE_EVENT_PCM_CAPTURE,
                samplingrate,
                0,
                jArray);
            // Release the local ref: this callback runs on a native thread so
            // local references are not reclaimed by a return to Java.
            env->DeleteLocalRef(jArray);
        }
    }

    if (fftSize != 0 && fft != NULL) {
        jbyteArray jArray = env->NewByteArray(fftSize);
        if (jArray != NULL) {
            jbyte *nArray = env->GetByteArrayElements(jArray, NULL);
            memcpy(nArray, fft, fftSize);
            env->ReleaseByteArrayElements(jArray, nArray, 0);
            env->CallStaticVoidMethod(
                callbackInfo->visualizer_class,
                fields.midPostNativeEvent,
                callbackInfo->visualizer_ref,
                NATIVE_EVENT_FFT_CAPTURE,
                samplingrate,
                0,
                jArray);
            env->DeleteLocalRef(jArray);
        }
    }

    if (env->ExceptionCheck()) {
        env->ExceptionDescribe();
        env->ExceptionClear();
    }
}
// Retrieves the native Visualizer pointer stored in the Java object's
// mNativeVisualizer field. Throws IllegalStateException (and returns NULL)
// when the instance has already been released.
static Visualizer *getVisualizer(JNIEnv* env, jobject thiz)
{
    Visualizer *visualizer =
            (Visualizer *)env->GetIntField(thiz, fields.fidNativeVisualizer);
    if (visualizer == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                "Unable to retrieve Visualizer pointer");
    }
    return visualizer;
}
// ----------------------------------------------------------------------------
// This function gets some field IDs, which in turn causes class initialization.
// It is called from a static block in Visualizer, which won't run until the
// first time an instance of this class is used.
// Resolves and caches the JNI class / method / field IDs used by this file.
// Called from the Java static initializer; on any failure it logs and returns
// early, leaving the corresponding fields entry NULL.
static void
android_media_visualizer_native_init(JNIEnv *env)
{
    LOGV("android_media_visualizer_native_init");
    fields.clazzEffect = NULL;
    // Get the Visualizer class and pin it with a global ref so the cached IDs stay valid.
    jclass clazz = env->FindClass(kClassPathName);
    if (clazz == NULL) {
        LOGE("Can't find %s", kClassPathName);
        return;
    }
    fields.clazzEffect = (jclass)env->NewGlobalRef(clazz);
    // Get the static postEventFromNative(Object, int, int, int, Object) method
    fields.midPostNativeEvent = env->GetStaticMethodID(
            fields.clazzEffect,
            "postEventFromNative", "(Ljava/lang/Object;IIILjava/lang/Object;)V");
    if (fields.midPostNativeEvent == NULL) {
        LOGE("Can't find Visualizer.%s", "postEventFromNative");
        return;
    }
    // Get the instance fields used to stash native pointers in the Java object.
    // mNativeVisualizer: holds the native Visualizer* as a Java int
    fields.fidNativeVisualizer = env->GetFieldID(
            fields.clazzEffect,
            "mNativeVisualizer", "I");
    if (fields.fidNativeVisualizer == NULL) {
        LOGE("Can't find Visualizer.%s", "mNativeVisualizer");
        return;
    }
    // mJniData: holds the visualizerJniStorage* as a Java int
    fields.fidJniData = env->GetFieldID(
            fields.clazzEffect,
            "mJniData", "I");
    if (fields.fidJniData == NULL) {
        LOGE("Can't find Visualizer.%s", "mJniData");
        return;
    }
}
// Creates the native Visualizer and its JNI storage for a new Java instance.
// weak_this: weak reference to the Java Visualizer (kept as a global ref for callbacks)
// sessionId: audio session the visualizer attaches to
// jId:       out parameter, receives the effect id on success
// Returns VISUALIZER_SUCCESS or a VISUALIZER_ERROR_* code; on failure both
// Java-side native handle fields are reset to 0.
static jint
android_media_visualizer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this,
        jint sessionId, jintArray jId)
{
    LOGV("android_media_visualizer_native_setup");
    visualizerJniStorage* lpJniStorage = NULL;
    int lStatus = VISUALIZER_ERROR_NO_MEMORY;
    Visualizer* lpVisualizer = NULL;
    jint* nId = NULL;
    lpJniStorage = new visualizerJniStorage();
    // NOTE(review): plain `new` throws on failure rather than returning NULL,
    // so this check never fires — confirm whether a nothrow new was intended.
    if (lpJniStorage == NULL) {
        LOGE("setup: Error creating JNI Storage");
        goto setup_failure;
    }
    lpJniStorage->mCallbackData.visualizer_class = (jclass)env->NewGlobalRef(fields.clazzEffect);
    // we use a weak reference so the Visualizer object can be garbage collected.
    lpJniStorage->mCallbackData.visualizer_ref = env->NewGlobalRef(weak_this);
    // NOTE(review): the two global refs taken above are not released on the
    // setup_failure path below — verify, otherwise each failed setup leaks them.
    LOGV("setup: lpJniStorage: %p visualizer_ref %p visualizer_class %p, &mCallbackData %p",
            lpJniStorage,
            lpJniStorage->mCallbackData.visualizer_ref,
            lpJniStorage->mCallbackData.visualizer_class,
            &lpJniStorage->mCallbackData);
    if (jId) {
        nId = (jint *) env->GetPrimitiveArrayCritical(jId, NULL);
        if (nId == NULL) {
            LOGE("setup: Error retrieving id pointer");
            lStatus = VISUALIZER_ERROR_BAD_VALUE;
            goto setup_failure;
        }
    } else {
        LOGE("setup: NULL java array for id pointer");
        lStatus = VISUALIZER_ERROR_BAD_VALUE;
        goto setup_failure;
    }
    // create the native Visualizer object
    lpVisualizer = new Visualizer(0,
                                  NULL,
                                  NULL,
                                  sessionId);
    if (lpVisualizer == NULL) {
        LOGE("Error creating Visualizer");
        goto setup_failure;
    }
    lStatus = translateError(lpVisualizer->initCheck());
    // ALREADY_EXISTS is tolerated: another visualizer is attached to the session.
    if (lStatus != VISUALIZER_SUCCESS && lStatus != VISUALIZER_ERROR_ALREADY_EXISTS) {
        LOGE("Visualizer initCheck failed %d", lStatus);
        goto setup_failure;
    }
    // report the effect id back to Java before releasing the critical array
    nId[0] = lpVisualizer->id();
    env->ReleasePrimitiveArrayCritical(jId, nId, 0);
    nId = NULL;
    // stash the native pointers in the Java object
    env->SetIntField(thiz, fields.fidNativeVisualizer, (int)lpVisualizer);
    env->SetIntField(thiz, fields.fidJniData, (int)lpJniStorage);
    return VISUALIZER_SUCCESS;
    // failures: undo partial construction and clear the Java-side handles
setup_failure:
    if (nId != NULL) {
        env->ReleasePrimitiveArrayCritical(jId, nId, 0);
    }
    if (lpVisualizer) {
        delete lpVisualizer;
    }
    env->SetIntField(thiz, fields.fidNativeVisualizer, 0);
    if (lpJniStorage) {
        delete lpJniStorage;
    }
    env->SetIntField(thiz, fields.fidJniData, 0);
    return lStatus;
}
// ----------------------------------------------------------------------------
// Destroys the native Visualizer and the JNI storage attached to this Java
// instance. Safe to call more than once: the fields read here are reset by
// native_release() and a NULL pointer is simply skipped.
// Fix vs original: the global references taken in native_setup() are now
// released before deleting the storage; previously each finalized Visualizer
// leaked one jclass and one jobject global reference.
static void android_media_visualizer_native_finalize(JNIEnv *env, jobject thiz) {
    LOGV("android_media_visualizer_native_finalize jobject: %x\n", (int)thiz);

    // delete the Visualizer object
    Visualizer* lpVisualizer = (Visualizer *)env->GetIntField(
        thiz, fields.fidNativeVisualizer);
    if (lpVisualizer) {
        LOGV("deleting Visualizer: %x\n", (int)lpVisualizer);
        delete lpVisualizer;
    }

    // delete the JNI data
    visualizerJniStorage* lpJniStorage = (visualizerJniStorage *)env->GetIntField(
        thiz, fields.fidJniData);
    if (lpJniStorage) {
        LOGV("deleting pJniStorage: %x\n", (int)lpJniStorage);
        // release the global refs taken in native_setup()
        if (lpJniStorage->mCallbackData.visualizer_class != NULL) {
            env->DeleteGlobalRef(lpJniStorage->mCallbackData.visualizer_class);
        }
        if (lpJniStorage->mCallbackData.visualizer_ref != NULL) {
            env->DeleteGlobalRef(lpJniStorage->mCallbackData.visualizer_ref);
        }
        delete lpJniStorage;
    }
}
// ----------------------------------------------------------------------------
// Explicit release entry point called from Java Visualizer.release().
static void android_media_visualizer_native_release(JNIEnv *env, jobject thiz) {
    // do everything a call to finalize would
    android_media_visualizer_native_finalize(env, thiz);
    // + reset the native resources in the Java object so any attempt to access
    // them after a call to release fails (getVisualizer() will then throw
    // IllegalStateException instead of dereferencing a dangling pointer).
    env->SetIntField(thiz, fields.fidNativeVisualizer, 0);
    env->SetIntField(thiz, fields.fidJniData, 0);
}
// Enables or disables the native visualizer engine; returns a translated
// VISUALIZER_* status code.
static jint
android_media_visualizer_native_setEnabled(JNIEnv *env, jobject thiz, jboolean enabled)
{
    Visualizer* visualizer = getVisualizer(env, thiz);
    if (visualizer == NULL) {
        return VISUALIZER_ERROR_NO_INIT;
    }
    return translateError(visualizer->setEnabled(enabled));
}
// Returns the current enable state of the native visualizer, or false when
// the instance has been released (an IllegalStateException is then pending).
static jboolean
android_media_visualizer_native_getEnabled(JNIEnv *env, jobject thiz)
{
    Visualizer* visualizer = getVisualizer(env, thiz);
    if (visualizer == NULL) {
        return false;
    }
    return (jboolean)visualizer->getEnabled();
}
// Returns a two-element array [min, max] with the capture sizes supported by
// the native Visualizer.
// Fix vs original: the results of NewIntArray() and GetIntArrayElements()
// were dereferenced without NULL checks; both can fail under memory pressure
// (an OutOfMemoryError is then pending and NULL is returned to Java).
static jintArray
android_media_visualizer_native_getCaptureSizeRange(JNIEnv *env, jobject thiz)
{
    jintArray jRange = env->NewIntArray(2);
    if (jRange == NULL) {
        return NULL;
    }
    jint *nRange = env->GetIntArrayElements(jRange, NULL);
    if (nRange == NULL) {
        return NULL;
    }
    nRange[0] = Visualizer::getMinCaptureSize();
    nRange[1] = Visualizer::getMaxCaptureSize();
    LOGV("getCaptureSizeRange() min %d max %d", nRange[0], nRange[1]);
    env->ReleaseIntArrayElements(jRange, nRange, 0);
    return jRange;
}
// Returns the maximum capture rate supported by the native Visualizer
// (static property, no instance needed).
static jint
android_media_visualizer_native_getMaxCaptureRate(JNIEnv *env, jobject thiz)
{
    return Visualizer::getMaxCaptureRate();
}
// Sets the capture size (bytes per waveform / FFT capture) on the native
// engine; returns a translated VISUALIZER_* status code.
static jint
android_media_visualizer_native_setCaptureSize(JNIEnv *env, jobject thiz, jint size)
{
    Visualizer* visualizer = getVisualizer(env, thiz);
    if (visualizer == NULL) {
        return VISUALIZER_ERROR_NO_INIT;
    }
    return translateError(visualizer->setCaptureSize(size));
}
// Returns the current capture size, or -1 when the instance has been released
// (an IllegalStateException is then pending).
static jint
android_media_visualizer_native_getCaptureSize(JNIEnv *env, jobject thiz)
{
    Visualizer* visualizer = getVisualizer(env, thiz);
    if (visualizer == NULL) {
        return -1;
    }
    return visualizer->getCaptureSize();
}
// Returns the sampling rate of the captured audio, or -1 when the instance
// has been released (an IllegalStateException is then pending).
static jint
android_media_visualizer_native_getSamplingRate(JNIEnv *env, jobject thiz)
{
    Visualizer* visualizer = getVisualizer(env, thiz);
    if (visualizer == NULL) {
        return -1;
    }
    return visualizer->getSamplingRate();
}
// Copies the latest waveform capture into the caller-supplied byte array.
// Returns a translated VISUALIZER_* status code.
static jint
android_media_visualizer_native_getWaveForm(JNIEnv *env, jobject thiz, jbyteArray jWaveform)
{
    Visualizer* visualizer = getVisualizer(env, thiz);
    if (visualizer == NULL) {
        return VISUALIZER_ERROR_NO_INIT;
    }
    // critical access: no JNI calls allowed until the array is released
    jbyte* dst = (jbyte *) env->GetPrimitiveArrayCritical(jWaveform, NULL);
    if (dst == NULL) {
        return VISUALIZER_ERROR_NO_MEMORY;
    }
    jint status = translateError(visualizer->getWaveForm((uint8_t *)dst));
    env->ReleasePrimitiveArrayCritical(jWaveform, dst, 0);
    return status;
}
// Copies the latest FFT capture into the caller-supplied byte array.
// Returns a translated VISUALIZER_* status code.
static jint
android_media_visualizer_native_getFft(JNIEnv *env, jobject thiz, jbyteArray jFft)
{
    Visualizer* visualizer = getVisualizer(env, thiz);
    if (visualizer == NULL) {
        return VISUALIZER_ERROR_NO_INIT;
    }
    // critical access: no JNI calls allowed until the array is released
    jbyte* dst = (jbyte *) env->GetPrimitiveArrayCritical(jFft, NULL);
    if (dst == NULL) {
        return VISUALIZER_ERROR_NO_MEMORY;
    }
    jint status = translateError(visualizer->getFft((uint8_t *)dst));
    env->ReleasePrimitiveArrayCritical(jFft, dst, 0);
    return status;
}
// Configures periodic capture callbacks back into Java. rate is in milliHertz;
// requesting neither waveform nor FFT clears the native callback.
static jint
android_media_setPeriodicCapture(JNIEnv *env, jobject thiz, jint rate, jboolean jWaveform, jboolean jFft)
{
    Visualizer* visualizer = getVisualizer(env, thiz);
    if (visualizer == NULL) {
        return VISUALIZER_ERROR_NO_INIT;
    }
    visualizerJniStorage* storage =
            (visualizerJniStorage *)env->GetIntField(thiz, fields.fidJniData);
    if (storage == NULL) {
        return VISUALIZER_ERROR_NO_INIT;
    }
    LOGV("setPeriodicCapture: rate %d, jWaveform %d jFft %d",
            rate,
            jWaveform,
            jFft);

    uint32_t flags = Visualizer::CAPTURE_CALL_JAVA;
    if (jWaveform) {
        flags |= Visualizer::CAPTURE_WAVEFORM;
    }
    if (jFft) {
        flags |= Visualizer::CAPTURE_FFT;
    }
    // a NULL callback stops the periodic capture
    Visualizer::capture_cbk_t cbk = (jWaveform || jFft) ? captureCallback : NULL;

    return translateError(visualizer->setCaptureCallBack(cbk,
            &storage->mCallbackData,
            flags,
            rate));
}
// ----------------------------------------------------------------------------
// Dalvik VM type signatures
// Maps the Java native method declarations in android.media.Visualizer to the
// C++ implementations above; registered via registerNativeMethods().
static JNINativeMethod gMethods[] = {
    {"native_init", "()V", (void *)android_media_visualizer_native_init},
    {"native_setup", "(Ljava/lang/Object;I[I)I",
        (void *)android_media_visualizer_native_setup},
    {"native_finalize", "()V", (void *)android_media_visualizer_native_finalize},
    {"native_release", "()V", (void *)android_media_visualizer_native_release},
    {"native_setEnabled", "(Z)I", (void *)android_media_visualizer_native_setEnabled},
    {"native_getEnabled", "()Z", (void *)android_media_visualizer_native_getEnabled},
    {"getCaptureSizeRange", "()[I", (void *)android_media_visualizer_native_getCaptureSizeRange},
    {"getMaxCaptureRate", "()I", (void *)android_media_visualizer_native_getMaxCaptureRate},
    {"native_setCaptureSize", "(I)I", (void *)android_media_visualizer_native_setCaptureSize},
    {"native_getCaptureSize", "()I", (void *)android_media_visualizer_native_getCaptureSize},
    {"native_getSamplingRate", "()I", (void *)android_media_visualizer_native_getSamplingRate},
    {"native_getWaveForm", "([B)I", (void *)android_media_visualizer_native_getWaveForm},
    {"native_getFft", "([B)I", (void *)android_media_visualizer_native_getFft},
    {"native_setPeriodicCapture","(IZZ)I",(void *)android_media_setPeriodicCapture},
};
// ----------------------------------------------------------------------------
// Entry point called from JNI_OnLoad in android_media_AudioEffect.cpp.
int register_android_media_visualizer(JNIEnv *env)
{
    return AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods));
}

View File

@ -94,3 +94,33 @@ LOCAL_PRELINK_MODULE := false
include $(BUILD_SHARED_LIBRARY)
endif
# Visualizer library
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
EffectVisualizer.cpp
LOCAL_CFLAGS+= -O2
LOCAL_SHARED_LIBRARIES := \
libcutils
LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx
LOCAL_MODULE:= libvisualizer
ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
LOCAL_LDLIBS += -ldl
endif
ifneq ($(TARGET_SIMULATOR),true)
LOCAL_SHARED_LIBRARIES += libdl
endif
LOCAL_C_INCLUDES := \
$(call include-path-for, graphics corecg)
LOCAL_PRELINK_MODULE := false
include $(BUILD_SHARED_LIBRARY)

View File

@ -0,0 +1,401 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "Visualizer"
//#define LOG_NDEBUG 0
#include <cutils/log.h>
#include <assert.h>
#include <stdlib.h>
#include <string.h>
#include <new>
#include <media/EffectVisualizerApi.h>
namespace android {
// effect_interface_t interface implementation for visualizer effect
extern "C" const struct effect_interface_s gVisualizerInterface;
// Google Visualizer UUID: d069d9e0-8329-11df-9168-0002a5d5c51b
// Descriptor advertised to the effect framework via EffectQueryEffect().
const effect_descriptor_t gVisualizerDescriptor = {
        {0xe46b26a0, 0xdddd, 0x11db, 0x8afd, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type
        {0xd069d9e0, 0x8329, 0x11df, 0x9168, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid
        EFFECT_API_VERSION,
        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST),
        0, // TODO — presumably the CPU load estimate; confirm against effect_descriptor_t
        1,
        "Visualizer",
        "Google Inc.",
};
// Engine lifecycle states: uninitialized -> initialized (EffectCreate) ->
// active (EFFECT_CMD_ENABLE) and back.
enum visualizer_state_e {
    VISUALIZER_STATE_UNINITIALIZED,
    VISUALIZER_STATE_INITIALIZED,
    VISUALIZER_STATE_ACTIVE,
};
// Per-instance engine state. mItfe must stay the first member: the framework
// treats the context pointer as an effect_interface_t handle.
struct VisualizerContext {
    const struct effect_interface_s *mItfe; // vtable, must be first
    effect_config_t mConfig;                // current in/out stream configuration
    uint32_t mState;                        // visualizer_state_e
    uint32_t mCaptureIdx;                   // write index into the current capture buffer
    uint32_t mCaptureSize;                  // bytes per completed capture
    uint32_t mCurrentBuf;                   // index (0/1) of the buffer being filled
    uint8_t mCaptureBuf[2][VISUALIZER_CAPTURE_SIZE_MAX]; // ping-pong capture buffers
};
//
//--- Local functions
//
// Clears all capture state: the write index, the current-buffer selector and
// both ping-pong capture buffers.
void Visualizer_reset(VisualizerContext *pContext)
{
    pContext->mCaptureIdx = 0;
    pContext->mCurrentBuf = 0;
    // mCaptureBuf is a contiguous [2][VISUALIZER_CAPTURE_SIZE_MAX] array, so
    // one memset covers both halves.
    memset(pContext->mCaptureBuf, 0, sizeof(pContext->mCaptureBuf));
}
//----------------------------------------------------------------------------
// Visualizer_configure()
//----------------------------------------------------------------------------
// Purpose: Set input and output audio configuration.
//
// Inputs:
//  pContext:   effect engine context
//  pConfig:    pointer to effect_config_t structure holding input and output
//      configuration parameters
//
// Outputs:
//  0 on success, -EINVAL for any unsupported configuration.
//
//----------------------------------------------------------------------------
int Visualizer_configure(VisualizerContext *pContext, effect_config_t *pConfig)
{
    LOGV("Visualizer_configure start");

    // The engine only supports matching in/out streams of stereo 16-bit PCM,
    // with an output that is either overwritten or accumulated into.
    if (pConfig->inputCfg.samplingRate != pConfig->outputCfg.samplingRate ||
        pConfig->inputCfg.channels != pConfig->outputCfg.channels ||
        pConfig->inputCfg.format != pConfig->outputCfg.format ||
        pConfig->inputCfg.channels != CHANNEL_STEREO ||
        pConfig->inputCfg.format != SAMPLE_FORMAT_PCM_S15 ||
        (pConfig->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_WRITE &&
         pConfig->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_ACCUMULATE)) {
        return -EINVAL;
    }

    memcpy(&pContext->mConfig, pConfig, sizeof(effect_config_t));
    Visualizer_reset(pContext);
    return 0;
}
//----------------------------------------------------------------------------
// Visualizer_init()
//----------------------------------------------------------------------------
// Purpose: Initialize engine with default configuration
// (stereo 16-bit PCM at 44100 Hz, maximum capture size).
//
// Inputs:
//  pContext:   effect engine context
//
// Outputs:
//  Always returns 0.
//
//----------------------------------------------------------------------------
int Visualizer_init(VisualizerContext *pContext)
{
    pContext->mConfig.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
    pContext->mConfig.inputCfg.channels = CHANNEL_STEREO;
    pContext->mConfig.inputCfg.format = SAMPLE_FORMAT_PCM_S15;
    pContext->mConfig.inputCfg.samplingRate = 44100;
    pContext->mConfig.inputCfg.bufferProvider.getBuffer = NULL;
    pContext->mConfig.inputCfg.bufferProvider.releaseBuffer = NULL;
    pContext->mConfig.inputCfg.bufferProvider.cookie = NULL;
    pContext->mConfig.inputCfg.mask = EFFECT_CONFIG_ALL;
    pContext->mConfig.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
    pContext->mConfig.outputCfg.channels = CHANNEL_STEREO;
    pContext->mConfig.outputCfg.format = SAMPLE_FORMAT_PCM_S15;
    pContext->mConfig.outputCfg.samplingRate = 44100;
    pContext->mConfig.outputCfg.bufferProvider.getBuffer = NULL;
    pContext->mConfig.outputCfg.bufferProvider.releaseBuffer = NULL;
    pContext->mConfig.outputCfg.bufferProvider.cookie = NULL;
    pContext->mConfig.outputCfg.mask = EFFECT_CONFIG_ALL;
    pContext->mCaptureSize = VISUALIZER_CAPTURE_SIZE_MAX;
    // NOTE(review): the return value of Visualizer_configure() is ignored here;
    // the defaults above always pass its checks, but confirm if they change.
    Visualizer_configure(pContext, &pContext->mConfig);
    return 0;
}
//
//--- Effect Library Interface Implementation
//
// Library entry point: reports how many effects this library exposes (one).
// Returns 0 on success, -EINVAL when pNumEffects is NULL (consistent with the
// argument checking already done in EffectQueryEffect()).
extern "C" int EffectQueryNumberEffects(uint32_t *pNumEffects) {
    if (pNumEffects == NULL) {
        return -EINVAL;
    }
    *pNumEffects = 1;
    return 0;
}
// Library entry point: copies the descriptor of effect number `index` into
// *pDescriptor. Only index 0 (the visualizer) exists.
// Returns 0 on success, -EINVAL for a NULL destination or an out-of-range index.
extern "C" int EffectQueryEffect(uint32_t index, effect_descriptor_t *pDescriptor) {
    if (pDescriptor == NULL || index > 0) {
        return -EINVAL;
    }
    memcpy(pDescriptor, &gVisualizerDescriptor, sizeof(effect_descriptor_t));
    return 0;
}
// Library entry point: instantiates the visualizer engine for the given uuid.
// On success *pInterface receives the effect interface handle (the context
// pointer itself, whose first member is the interface vtable).
// Returns 0 on success, -EINVAL for bad arguments or an unknown uuid, or the
// negative error code from Visualizer_init().
// Fix vs original: removed the unused local `int i;` and moved `ret` to its
// point of first use.
extern "C" int EffectCreate(effect_uuid_t *uuid,
        int32_t sessionId,
        int32_t ioId,
        effect_interface_t *pInterface) {
    if (pInterface == NULL || uuid == NULL) {
        return -EINVAL;
    }
    if (memcmp(uuid, &gVisualizerDescriptor.uuid, sizeof(effect_uuid_t)) != 0) {
        return -EINVAL;
    }

    VisualizerContext *pContext = new VisualizerContext;

    pContext->mItfe = &gVisualizerInterface;
    pContext->mState = VISUALIZER_STATE_UNINITIALIZED;

    int ret = Visualizer_init(pContext);
    if (ret < 0) {
        LOGW("EffectCreate() init failed");
        delete pContext;
        return ret;
    }

    *pInterface = (effect_interface_t)pContext;

    pContext->mState = VISUALIZER_STATE_INITIALIZED;

    LOGV("EffectCreate %p", pContext);

    return 0;
}
// Library entry point: tears down an engine created by EffectCreate().
// Returns 0 on success, -EINVAL for a NULL handle.
extern "C" int EffectRelease(effect_interface_t interface) {
    VisualizerContext * pContext = (VisualizerContext *)interface;

    LOGV("EffectRelease %p", interface);
    if (pContext == NULL) {
        return -EINVAL;
    }
    // mark uninitialized before freeing so a stale interface call fails the
    // state check in Visualizer_command()/Visualizer_process()
    pContext->mState = VISUALIZER_STATE_UNINITIALIZED;
    delete pContext;

    return 0;
}
//
//--- Effect Control Interface Implementation
//
// Saturates a 32-bit sample to the signed 16-bit range [-32768, 32767].
static inline int16_t clamp16(int32_t sample)
{
    if (sample > 32767) {
        return 32767;
    }
    if (sample < -32768) {
        return -32768;
    }
    return (int16_t)sample;
}
// Effect control interface: processes one audio buffer. Downmixes the stereo
// 16-bit input to 8-bit mono into the current capture buffer, then passes the
// audio through (copy or accumulate) unmodified.
// Returns 0 on success, -EINVAL for bad buffers, -ENOSYS when not active.
extern "C" int Visualizer_process(
        effect_interface_t self,audio_buffer_t *inBuffer, audio_buffer_t *outBuffer)
{
    android::VisualizerContext * pContext = (android::VisualizerContext *)self;

    if (pContext == NULL) {
        return -EINVAL;
    }
    if (pContext->mState != VISUALIZER_STATE_ACTIVE) {
        return -ENOSYS;
    }

    if (inBuffer == NULL || inBuffer->raw == NULL ||
        outBuffer == NULL || outBuffer->raw == NULL ||
        inBuffer->frameCount != outBuffer->frameCount ||
        inBuffer->frameCount == 0) {
        return -EINVAL;
    }

    // all code below assumes stereo 16 bit PCM output and input
    uint32_t captIdx;
    uint32_t inIdx;
    uint8_t *buf = pContext->mCaptureBuf[pContext->mCurrentBuf];
    for (inIdx = 0, captIdx = pContext->mCaptureIdx;
         inIdx < inBuffer->frameCount && captIdx < pContext->mCaptureSize;
         inIdx++, captIdx++) {
        // sum left + right, then round and shift down to 8 bits
        // (L+R is 17 significant bits; +(1<<8) rounds before the >>9)
        int32_t smp = inBuffer->s16[2 * inIdx] + inBuffer->s16[2 * inIdx + 1];
        smp = (smp + (1 << 8)) >> 9;
        // XOR with 0x80 converts the signed 8-bit value to offset-binary
        buf[captIdx] = ((uint8_t)smp)^0x80;
    }
    pContext->mCaptureIdx = captIdx;

    // go to next buffer when buffer full: ping-pong so readers see a stable,
    // completed capture while this one is being filled
    if (pContext->mCaptureIdx == pContext->mCaptureSize) {
        pContext->mCurrentBuf ^= 1;
        pContext->mCaptureIdx = 0;
    }

    // pass-through: only touch the output when in and out are distinct buffers
    if (inBuffer->raw != outBuffer->raw) {
        if (pContext->mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
            // mix into the existing output with 16-bit saturation
            for (size_t i = 0; i < outBuffer->frameCount*2; i++) {
                outBuffer->s16[i] = clamp16(outBuffer->s16[i] + inBuffer->s16[i]);
            }
        } else {
            // 2 channels * 2 bytes per sample
            memcpy(outBuffer->raw, inBuffer->raw, outBuffer->frameCount * 2 * sizeof(int16_t));
        }
    }
    return 0;
}   // end Visualizer_process
// Effect engine command handler for the visualizer.
// Dispatches the standard effect framework commands (init, configure, reset,
// enable/disable, get/set parameter, device/volume/mode no-ops) plus the
// visualizer-specific VISU_CMD_CAPTURE command which returns the most
// recently completed capture buffer.
// Returns 0 on success, -EINVAL on bad arguments or an unknown command, and
// -ENOSYS when the requested state transition is not legal.
extern "C" int Visualizer_command(effect_interface_t self, int cmdCode, int cmdSize,
        void *pCmdData, int *replySize, void *pReplyData) {
    android::VisualizerContext * pContext = (android::VisualizerContext *)self;

    if (pContext == NULL || pContext->mState == VISUALIZER_STATE_UNINITIALIZED) {
        return -EINVAL;
    }

    //LOGV("Visualizer_command command %d cmdSize %d",cmdCode, cmdSize);

    switch (cmdCode) {
    case EFFECT_CMD_INIT:
        // Reply carries the init status as an int.
        if (pReplyData == NULL || *replySize != sizeof(int)) {
            return -EINVAL;
        }
        *(int *) pReplyData = Visualizer_init(pContext);
        break;
    case EFFECT_CMD_CONFIGURE:
        if (pCmdData == NULL || cmdSize != sizeof(effect_config_t)
                || pReplyData == NULL || *replySize != sizeof(int)) {
            return -EINVAL;
        }
        *(int *) pReplyData = Visualizer_configure(pContext,
                (effect_config_t *) pCmdData);
        break;
    case EFFECT_CMD_RESET:
        Visualizer_reset(pContext);
        break;
    case EFFECT_CMD_ENABLE:
        if (pReplyData == NULL || *replySize != sizeof(int)) {
            return -EINVAL;
        }
        // Enabling is only legal from the configured-but-idle state.
        if (pContext->mState != VISUALIZER_STATE_INITIALIZED) {
            return -ENOSYS;
        }
        pContext->mState = VISUALIZER_STATE_ACTIVE;
        LOGV("EFFECT_CMD_ENABLE() OK");
        *(int *)pReplyData = 0;
        break;
    case EFFECT_CMD_DISABLE:
        if (pReplyData == NULL || *replySize != sizeof(int)) {
            return -EINVAL;
        }
        if (pContext->mState != VISUALIZER_STATE_ACTIVE) {
            return -ENOSYS;
        }
        pContext->mState = VISUALIZER_STATE_INITIALIZED;
        LOGV("EFFECT_CMD_DISABLE() OK");
        *(int *)pReplyData = 0;
        break;
    case EFFECT_CMD_GET_PARAM: {
        // Command payload: effect_param_t header + one uint32_t parameter id.
        // Reply: the same header + id, followed by one uint32_t value.
        if (pCmdData == NULL ||
            cmdSize != (int)(sizeof(effect_param_t) + sizeof(uint32_t)) ||
            pReplyData == NULL ||
            *replySize < (int)(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(uint32_t))) {
            return -EINVAL;
        }
        memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + sizeof(uint32_t));
        effect_param_t *p = (effect_param_t *)pReplyData;
        p->status = 0;
        *replySize = sizeof(effect_param_t) + sizeof(uint32_t);
        // The only readable parameter is the capture size.
        if (p->psize != sizeof(uint32_t) ||
            *(uint32_t *)p->data != VISU_PARAM_CAPTURE_SIZE) {
            p->status = -EINVAL;
            break;
        }
        LOGV("get mCaptureSize = %d", pContext->mCaptureSize);
        *((uint32_t *)p->data + 1) = pContext->mCaptureSize;
        p->vsize = sizeof(uint32_t);
        *replySize += sizeof(uint32_t);
        } break;
    case EFFECT_CMD_SET_PARAM: {
        if (pCmdData == NULL ||
            cmdSize != (int)(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(uint32_t)) ||
            pReplyData == NULL || *replySize != sizeof(int32_t)) {
            return -EINVAL;
        }
        *(int32_t *)pReplyData = 0;
        effect_param_t *p = (effect_param_t *)pCmdData;
        // The only writable parameter is the capture size.
        if (p->psize != sizeof(uint32_t) ||
            p->vsize != sizeof(uint32_t) ||
            *(uint32_t *)p->data != VISU_PARAM_CAPTURE_SIZE) {
            *(int32_t *)pReplyData = -EINVAL;
            break; // fixed stray "break;;" (empty statement)
        }
        // NOTE(review): the new size is not range/power-of-two checked here;
        // the Visualizer proxy validates before sending — confirm no other
        // caller can reach this path with an out-of-range value.
        pContext->mCaptureSize = *((uint32_t *)p->data + 1);
        LOGV("set mCaptureSize = %d", pContext->mCaptureSize);
        } break;
    case EFFECT_CMD_SET_DEVICE:
    case EFFECT_CMD_SET_VOLUME:
    case EFFECT_CMD_SET_AUDIO_MODE:
        // No-ops for the visualizer, but must be accepted.
        break;
    case VISU_CMD_CAPTURE:
        if (pReplyData == NULL || *replySize != (int)pContext->mCaptureSize) {
            LOGV("VISU_CMD_CAPTURE() error *replySize %d pContext->mCaptureSize %d",
                    *replySize, pContext->mCaptureSize);
            return -EINVAL;
        }
        if (pContext->mState == VISUALIZER_STATE_ACTIVE) {
            // Return the buffer NOT currently being written (double buffering).
            memcpy(pReplyData,
                   pContext->mCaptureBuf[pContext->mCurrentBuf ^ 1],
                   pContext->mCaptureSize);
        } else {
            // Not capturing: return silence (0x80 is the offset-binary zero).
            memset(pReplyData, 0x80, pContext->mCaptureSize);
        }
        break;
    default:
        LOGW("Visualizer_command invalid command %d",cmdCode);
        return -EINVAL;
    }
    return 0;
}
// effect_interface_t interface implementation for visualizer effect:
// the process and command entry points exported to the effect framework.
const struct effect_interface_s gVisualizerInterface = {
Visualizer_process,
Visualizer_command
};
} // namespace

View File

@ -30,7 +30,8 @@ LOCAL_SRC_FILES:= \
MediaProfiles.cpp \
IEffect.cpp \
IEffectClient.cpp \
AudioEffect.cpp
AudioEffect.cpp \
Visualizer.cpp
LOCAL_SHARED_LIBRARIES := \
libui libcutils libutils libbinder libsonivox libicuuc libexpat libsurfaceflinger_client libcamera_client

View File

@ -171,7 +171,7 @@ AudioEffect::~AudioEffect()
LOGV("Destructor %p", this);
if (mStatus == NO_ERROR || mStatus == ALREADY_EXISTS) {
disable();
setEnabled(false);
if (mIEffect != NULL) {
mIEffect->disconnect();
mIEffect->asBinder()->unlinkToDeath(mIEffectClient);
@ -196,36 +196,28 @@ effect_descriptor_t AudioEffect::descriptor() const
return mDescriptor;
}
bool AudioEffect::isEnabled() const
bool AudioEffect::getEnabled() const
{
return (mEnabled != 0);
}
status_t AudioEffect::enable()
status_t AudioEffect::setEnabled(bool enabled)
{
if (mStatus != NO_ERROR) {
return INVALID_OPERATION;
}
LOGV("enable %p", this);
if (android_atomic_or(1, &mEnabled) == 0) {
return mIEffect->enable();
if (enabled) {
LOGV("enable %p", this);
if (android_atomic_or(1, &mEnabled) == 0) {
return mIEffect->enable();
}
} else {
LOGV("disable %p", this);
if (android_atomic_and(~1, &mEnabled) == 1) {
return mIEffect->disable();
}
}
return INVALID_OPERATION;
}
status_t AudioEffect::disable()
{
if (mStatus != NO_ERROR) {
return INVALID_OPERATION;
}
LOGV("disable %p", this);
if (android_atomic_and(~1, &mEnabled) == 1) {
return mIEffect->disable();
}
return INVALID_OPERATION;
}
@ -349,7 +341,7 @@ void AudioEffect::controlStatusChanged(bool controlGranted)
void AudioEffect::enableStatusChanged(bool enabled)
{
LOGV("enableStatusChanged %p enabled %d", this, enabled);
LOGV("enableStatusChanged %p enabled %d mCbf %p", this, enabled, mCbf);
if (mStatus == ALREADY_EXISTS) {
mEnabled = enabled;
if (mCbf) {

View File

@ -35,8 +35,7 @@ enum {
DECODE_FD,
CREATE_MEDIA_RECORDER,
CREATE_METADATA_RETRIEVER,
GET_OMX,
SNOOP
GET_OMX
};
class BpMediaPlayerService: public BpInterface<IMediaPlayerService>
@ -134,14 +133,6 @@ public:
return interface_cast<IMemory>(reply.readStrongBinder());
}
virtual sp<IMemory> snoop()
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
remote()->transact(SNOOP, data, &reply);
return interface_cast<IMemory>(reply.readStrongBinder());
}
virtual sp<IOMX> getOMX() {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
@ -221,12 +212,6 @@ status_t BnMediaPlayerService::onTransact(
reply->writeStrongBinder(player->asBinder());
return NO_ERROR;
} break;
case SNOOP: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
sp<IMemory> snooped_audio = snoop();
reply->writeStrongBinder(snooped_audio->asBinder());
return NO_ERROR;
} break;
case CREATE_MEDIA_RECORDER: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
pid_t pid = data.readInt32();

View File

@ -0,0 +1,330 @@
/*
**
** Copyright 2010, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "Visualizer"
#include <utils/Log.h>
#include <stdint.h>
#include <sys/types.h>
#include <limits.h>
#include <media/Visualizer.h>
extern "C" {
#define FLOATING_POINT 1
#include "fftwrap.h"
}
namespace android {
// ---------------------------------------------------------------------------
// Constructor: attaches an AudioEffect of type SL_IID_VISUALIZATION on the
// given audio session, queries the engine's current capture size and
// allocates the speex FFT tables sized for it.
Visualizer::Visualizer (int32_t priority,
effect_callback_t cbf,
void* user,
int sessionId)
: AudioEffect(SL_IID_VISUALIZATION, NULL, priority, cbf, user, sessionId),
mCaptureRate(CAPTURE_RATE_DEF),
mCaptureSize(CAPTURE_SIZE_DEF),
// NOTE(review): 44100000 — presumably 44100 Hz expressed in milliHz;
// confirm the unit against consumers of mSampleRate.
mSampleRate(44100000),
mCaptureCallBack(NULL),
mCaptureCbkUser(NULL)
{
initCaptureSize();
// A zero capture size means the engine query failed; skip FFT setup.
if (mCaptureSize != 0) {
mFftTable = spx_fft_init(mCaptureSize);
} else {
mFftTable = NULL;
}
}
// Destructor: releases the speex FFT tables allocated by the constructor or
// by setCaptureSize().
Visualizer::~Visualizer()
{
if (mFftTable != NULL) {
spx_fft_destroy(mFftTable);
}
}
// Enables or disables the effect and starts/stops the capture thread to
// match. The thread's lock is held across the base-class state change so the
// capture thread cannot observe a half-updated enable state.
status_t Visualizer::setEnabled(bool enabled)
{
Mutex::Autolock _l(mLock);
sp<CaptureThread> t = mCaptureThread;
if (t != 0) {
if (enabled) {
if (t->exitPending()) {
// Thread is still winding down from a previous disable; wait for it.
// requestExitAndWait() returns WOULD_BLOCK when invoked from the capture
// thread itself, which would otherwise self-deadlock.
if (t->requestExitAndWait() == WOULD_BLOCK) {
LOGE("Visualizer::enable() called from thread");
return INVALID_OPERATION;
}
}
}
t->mLock.lock();
}
status_t status = AudioEffect::setEnabled(enabled);
// Only start/stop the capture thread if the engine state change succeeded.
if (status == NO_ERROR) {
if (t != 0) {
if (enabled) {
t->run("AudioTrackThread");
} else {
t->requestExit();
}
}
}
if (t != 0) {
t->mLock.unlock();
}
return status;
}
// Registers (or clears, when cbk == NULL) the periodic capture callback.
// flags select waveform and/or FFT delivery and whether the callback may call
// into Java; rate is the capture rate (rejected above CAPTURE_RATE_MAX).
// Only allowed while the effect is disabled.
status_t Visualizer::setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t flags, uint32_t rate)
{
if (rate > CAPTURE_RATE_MAX) {
return BAD_VALUE;
}
Mutex::Autolock _l(mLock);
// Callback parameters cannot change while capture is running.
if (mEnabled) {
return INVALID_OPERATION;
}
// Hold the old thread's lock while swapping parameters so an in-flight
// periodicCapture() does not see a mix of old and new values.
sp<CaptureThread> t = mCaptureThread;
if (t != 0) {
t->mLock.lock();
}
mCaptureThread.clear();
mCaptureCallBack = cbk;
mCaptureCbkUser = user;
mCaptureFlags = flags;
mCaptureRate = rate;
if (t != 0) {
t->mLock.unlock();
}
// A non-NULL callback gets a fresh thread; it is started by setEnabled().
if (cbk != NULL) {
mCaptureThread = new CaptureThread(*this, rate, ((flags & CAPTURE_CALL_JAVA) != 0));
if (mCaptureThread == 0) {
LOGE("Could not create callback thread");
return NO_INIT;
}
}
LOGV("setCaptureCallBack() rate: %d thread %p flags 0x%08x",
rate, mCaptureThread.get(), mCaptureFlags);
return NO_ERROR;
}
// Sets the number of captured samples. size must be a power of two inside
// [VISUALIZER_CAPTURE_SIZE_MIN, VISUALIZER_CAPTURE_SIZE_MAX] and the effect
// must be disabled. On success both the engine parameter and the local FFT
// tables are updated.
status_t Visualizer::setCaptureSize(uint32_t size)
{
if (size > VISUALIZER_CAPTURE_SIZE_MAX ||
size < VISUALIZER_CAPTURE_SIZE_MIN ||
AudioSystem::popCount(size) != 1) {
return BAD_VALUE;
}
Mutex::Autolock _l(mLock);
if (mEnabled) {
return INVALID_OPERATION;
}
// Build the effect_param_t in an aligned stack buffer: one uint32_t
// parameter id (VISU_PARAM_CAPTURE_SIZE) followed by one uint32_t value.
uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2];
effect_param_t *p = (effect_param_t *)buf32;
p->psize = sizeof(uint32_t);
p->vsize = sizeof(uint32_t);
*(int32_t *)p->data = VISU_PARAM_CAPTURE_SIZE;
*((int32_t *)p->data + 1)= size;
status_t status = setParameter(p);
LOGV("setCaptureSize size %d status %d p->status %d", size, status, p->status);
// The transport may succeed while the engine rejects the parameter;
// surface the per-parameter status in that case.
if (status == NO_ERROR) {
status = p->status;
}
if (status == NO_ERROR) {
mCaptureSize = size;
// Re-create the FFT tables for the new size.
if (mFftTable != NULL) {
spx_fft_destroy(mFftTable);
}
mFftTable = spx_fft_init(mCaptureSize);
LOGV("setCaptureSize size %d mFftTable %p", mCaptureSize, mFftTable);
}
return status;
}
// Copies the latest captured waveform into 'waveform' (mCaptureSize unsigned
// 8-bit samples). When the effect is disabled the buffer is filled with 0x80
// (the offset-binary zero level) instead.
// Returns BAD_VALUE for a NULL pointer, NO_INIT when no capture size is
// configured, NOT_ENOUGH_DATA when the engine returned no samples, otherwise
// the status of the capture command.
status_t Visualizer::getWaveForm(uint8_t *waveform)
{
    if (waveform == NULL) {
        return BAD_VALUE;
    }
    if (mCaptureSize == 0) {
        return NO_INIT;
    }

    status_t status = NO_ERROR;
    if (mEnabled) {
        int32_t replySize = mCaptureSize;
        // Bug fix: this used to declare a second 'status_t status' that
        // shadowed the outer one, so the command result and NOT_ENOUGH_DATA
        // were discarded and the function always returned NO_ERROR.
        status = command(VISU_CMD_CAPTURE, 0, NULL, &replySize, waveform);
        if (replySize == 0) {
            status = NOT_ENOUGH_DATA;
        }
    } else {
        memset(waveform, 0x80, mCaptureSize);
    }
    return status;
}
// Captures the current waveform and returns its FFT in 'fft' (mCaptureSize
// 8-bit values). When the effect is disabled the buffer is zero-filled.
// Returns BAD_VALUE for a NULL pointer, NO_INIT when no capture size is
// configured, otherwise the status of the capture or FFT step.
status_t Visualizer::getFft(uint8_t *fft)
{
    if (fft == NULL) {
        return BAD_VALUE;
    }
    if (mCaptureSize == 0) {
        return NO_INIT;
    }

    status_t status = NO_ERROR;
    if (mEnabled) {
        uint8_t buf[mCaptureSize];
        // Bug fix: this used to declare a second 'status_t status' that
        // shadowed the outer one, so getWaveForm()/doFft() failures were
        // discarded and the function always returned NO_ERROR.
        status = getWaveForm(buf);
        if (status == NO_ERROR) {
            status = doFft(fft, buf);
        }
    } else {
        memset(fft, 0, mCaptureSize);
    }
    return status;
}
// Runs the speex float FFT over an 8-bit waveform and narrows the result back
// to 8-bit values in 'fft'. Both buffers hold mCaptureSize entries.
// Returns NO_INIT when no FFT table has been allocated.
status_t Visualizer::doFft(uint8_t *fft, uint8_t *waveform)
{
if (mFftTable == NULL) {
return NO_INIT;
}
// Expand each 8-bit sample to 16-bit scale for the float FFT input.
// NOTE(review): the cast to int16_t happens BEFORE the shift and does not
// sign-extend the 8-bit value, so samples >= 0x80 after the XOR become large
// positive values rather than negatives — confirm whether this offset is
// intentional or should be (int8_t)(waveform[i] ^ 0x80).
float fsrc[mCaptureSize];
for (uint32_t i = 0; i < mCaptureSize; i++) {
fsrc[i] = (int16_t)(waveform[i] ^ 0x80) << 8;
}
float fdst[mCaptureSize];
spx_fft_float(mFftTable, fsrc, fdst);
// Scale the FFT output back down to 8 bits.
for (uint32_t i = 0; i < mCaptureSize; i++) {
fft[i] = (uint8_t)((int32_t)fdst[i] >> 8);
}
return NO_ERROR;
}
// Invoked by the CaptureThread once per capture period: grabs the latest
// waveform (and its FFT when requested) and delivers them to the registered
// callback. Silently returns when capture or FFT fails.
void Visualizer::periodicCapture()
{
Mutex::Autolock _l(mLock);
LOGV("periodicCapture() %p mCaptureCallBack %p mCaptureFlags 0x%08x",
this, mCaptureCallBack, mCaptureFlags);
// Nothing to do without a callback, a requested data type and a valid size.
if (mCaptureCallBack != NULL &&
(mCaptureFlags & (CAPTURE_WAVEFORM|CAPTURE_FFT)) &&
mCaptureSize != 0) {
uint8_t waveform[mCaptureSize];
status_t status = getWaveForm(waveform);
if (status != NO_ERROR) {
return;
}
uint8_t fft[mCaptureSize];
if (mCaptureFlags & CAPTURE_FFT) {
status = doFft(fft, waveform);
}
if (status != NO_ERROR) {
return;
}
// Pass NULL/0 for whichever of waveform/FFT the client did not request.
uint8_t *wavePtr = NULL;
uint8_t *fftPtr = NULL;
uint32_t waveSize = 0;
uint32_t fftSize = 0;
if (mCaptureFlags & CAPTURE_WAVEFORM) {
wavePtr = waveform;
waveSize = mCaptureSize;
}
if (mCaptureFlags & CAPTURE_FFT) {
fftPtr = fft;
fftSize = mCaptureSize;
}
mCaptureCallBack(mCaptureCbkUser, waveSize, wavePtr, fftSize, fftPtr, mSampleRate);
}
}
// Queries the engine for its current capture size, caches it in mCaptureSize
// and returns it (0 when the query fails).
uint32_t Visualizer::initCaptureSize()
{
// effect_param_t header followed by one uint32_t parameter id and one
// uint32_t value slot, built in an aligned stack buffer.
uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2];
effect_param_t *p = (effect_param_t *)buf32;
p->psize = sizeof(uint32_t);
p->vsize = sizeof(uint32_t);
*(int32_t *)p->data = VISU_PARAM_CAPTURE_SIZE;
status_t status = getParameter(p);
// Fold the per-parameter status into the transport status.
if (status == NO_ERROR) {
status = p->status;
}
uint32_t size = 0;
if (status == NO_ERROR) {
size = *((int32_t *)p->data + 1);
}
mCaptureSize = size;
LOGV("initCaptureSize size %d status %d", mCaptureSize, status);
return size;
}
//-------------------------------------------------------------------------
// Capture thread constructor: derives the sleep period from the capture rate.
// NOTE(review): 10^9 / rate gives microseconds only if captureRate is in
// milliHz (consistent with mSampleRate above) — confirm the unit.
Visualizer::CaptureThread::CaptureThread(Visualizer& receiver, uint32_t captureRate, bool bCanCallJava)
: Thread(bCanCallJava), mReceiver(receiver)
{
mSleepTimeUs = 1000000000 / captureRate;
LOGV("CaptureThread cstor %p captureRate %d mSleepTimeUs %d", this, captureRate, mSleepTimeUs);
}
// Thread body: sleeps one capture period, then triggers a capture, until an
// exit is requested via requestExit().
bool Visualizer::CaptureThread::threadLoop()
{
LOGV("CaptureThread %p enter", this);
while (!exitPending())
{
usleep(mSleepTimeUs);
mReceiver.periodicCapture();
}
LOGV("CaptureThread %p exiting", this);
// Returning false tells the Thread framework not to re-run the loop.
return false;
}
// Nothing to prepare before the thread loop starts.
status_t Visualizer::CaptureThread::readyToRun()
{
return NO_ERROR;
}
// Intentionally empty: the thread is started explicitly from
// Visualizer::setEnabled(), not on first strong reference.
void Visualizer::CaptureThread::onFirstRef()
{
}
}; // namespace android

View File

@ -658,61 +658,4 @@ void MediaPlayer::died()
}
extern "C" {
#define FLOATING_POINT 1
#include "fftwrap.h"
}
static void *ffttable = NULL;
// peeks at the audio data and fills 'data' with the requested kind
// (currently kind=0 returns mono 16 bit PCM data, and kind=1 returns
// 256 point FFT data). Return value is number of samples returned,
// which may be 0.
/*static*/ int MediaPlayer::snoop(short* data, int len, int kind) {
sp<IMemory> p;
const sp<IMediaPlayerService>& service = getMediaPlayerService();
if (service != 0) {
// Take a peek at the waveform. The returned data consists of 16 bit mono PCM data.
p = service->snoop();
if (p == NULL) {
return 0;
}
if (kind == 0) { // return waveform data
int plen = p->size();
len *= 2; // number of shorts -> number of bytes
short *src = (short*) p->pointer();
if (plen > len) {
plen = len;
}
memcpy(data, src, plen);
return plen / sizeof(short); // return number of samples
} else if (kind == 1) {
// TODO: use a more efficient FFT
// Right now this uses the speex library, which is compiled to do a float FFT
if (!ffttable) ffttable = spx_fft_init(512);
short *usrc = (short*) p->pointer();
float fsrc[512];
for (int i=0;i<512;i++)
fsrc[i] = usrc[i];
float fdst[512];
spx_fft_float(ffttable, fsrc, fdst);
if (len > 512) {
len = 512;
}
len /= 2; // only half the output data is valid
for (int i=0; i < len; i++)
data[i] = fdst[i];
return len;
}
} else {
LOGE("Unable to locate media service");
}
return 0;
}
}; // namespace android

View File

@ -1265,98 +1265,6 @@ Exit:
return mem;
}
/*
* Avert your eyes, ugly hack ahead.
* The following is to support music visualizations.
*/
static const int NUMVIZBUF = 32;
static const int VIZBUFFRAMES = 1024;
static const int BUFTIMEMSEC = NUMVIZBUF * VIZBUFFRAMES * 1000 / 44100;
static const int TOTALBUFTIMEMSEC = NUMVIZBUF * BUFTIMEMSEC;
static bool gotMem = false;
static sp<MemoryHeapBase> heap;
static sp<MemoryBase> mem[NUMVIZBUF];
static uint64_t endTime;
static uint64_t lastReadTime;
static uint64_t lastWriteTime;
static int writeIdx = 0;
static void allocVizBufs() {
if (!gotMem) {
heap = new MemoryHeapBase(NUMVIZBUF * VIZBUFFRAMES * 2, 0, "snooper");
for (int i=0;i<NUMVIZBUF;i++) {
mem[i] = new MemoryBase(heap, VIZBUFFRAMES * 2 * i, VIZBUFFRAMES * 2);
}
endTime = 0;
gotMem = true;
}
}
/*
* Get a buffer of audio data that is about to be played.
* We don't synchronize this because in practice the writer
* is ahead of the reader, and even if we did happen to catch
* a buffer while it's being written, it's just a visualization,
* so no harm done.
*/
static sp<MemoryBase> getVizBuffer() {
allocVizBufs();
lastReadTime = uptimeMillis();
// if there is no recent buffer (yet), just return empty handed
if (lastWriteTime + TOTALBUFTIMEMSEC < lastReadTime) {
//LOGI("@@@@ no audio data to look at yet: %d + %d < %d", (int)lastWriteTime, TOTALBUFTIMEMSEC, (int)lastReadTime);
return NULL;
}
int timedelta = endTime - lastReadTime;
if (timedelta < 0) timedelta = 0;
int framedelta = timedelta * 44100 / 1000;
int headIdx = (writeIdx - framedelta) / VIZBUFFRAMES - 1;
while (headIdx < 0) {
headIdx += NUMVIZBUF;
}
return mem[headIdx];
}
// Append the data to the vizualization buffer
static void makeVizBuffers(const char *data, int len, uint64_t time) {
allocVizBufs();
uint64_t startTime = time;
const int frameSize = 4; // 16 bit stereo sample is 4 bytes
int offset = writeIdx;
int maxoff = heap->getSize() / 2; // in shorts
short *base = (short*)heap->getBase();
short *src = (short*)data;
while (len > 0) {
// Degrade quality by mixing to mono and clearing the lowest 3 bits.
// This should still be good enough for a visualization
base[offset++] = ((int(src[0]) + int(src[1])) >> 1) & ~0x7;
src += 2;
len -= frameSize;
if (offset >= maxoff) {
offset = 0;
}
}
writeIdx = offset;
endTime = time + (len / frameSize) / 44;
//LOGI("@@@ stored buffers from %d to %d", uint32_t(startTime), uint32_t(time));
}
sp<IMemory> MediaPlayerService::snoop()
{
sp<MemoryBase> mem = getVizBuffer();
return mem;
}
#undef LOG_TAG
#define LOG_TAG "AudioSink"
@ -1371,7 +1279,6 @@ MediaPlayerService::AudioOutput::AudioOutput(int sessionId)
mRightVolume = 1.0;
mLatency = 0;
mMsecsPerFrame = 0;
mNumFramesWritten = 0;
setMinBufferCount();
}
@ -1516,30 +1423,9 @@ void MediaPlayerService::AudioOutput::start()
if (mTrack) {
mTrack->setVolume(mLeftVolume, mRightVolume);
mTrack->start();
mTrack->getPosition(&mNumFramesWritten);
}
}
void MediaPlayerService::AudioOutput::snoopWrite(const void* buffer, size_t size) {
// Only make visualization buffers if anyone recently requested visualization data
uint64_t now = uptimeMillis();
if (lastReadTime + TOTALBUFTIMEMSEC >= now) {
// Based on the current play counter, the number of frames written and
// the current real time we can calculate the approximate real start
// time of the buffer we're about to write.
uint32_t pos;
mTrack->getPosition(&pos);
// we're writing ahead by this many frames:
int ahead = mNumFramesWritten - pos;
//LOGI("@@@ written: %d, playpos: %d, latency: %d", mNumFramesWritten, pos, mTrack->latency());
// which is this many milliseconds, assuming 44100 Hz:
ahead /= 44;
makeVizBuffers((const char*)buffer, size, now + ahead + mTrack->latency());
lastWriteTime = now;
}
}
ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size)
@ -1548,9 +1434,7 @@ ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size)
//LOGV("write(%p, %u)", buffer, size);
if (mTrack) {
snoopWrite(buffer, size);
ssize_t ret = mTrack->write(buffer, size);
mNumFramesWritten += ret / 4; // assume 16 bit stereo
return ret;
}
return NO_INIT;
@ -1560,7 +1444,6 @@ void MediaPlayerService::AudioOutput::stop()
{
LOGV("stop");
if (mTrack) mTrack->stop();
lastWriteTime = 0;
}
void MediaPlayerService::AudioOutput::flush()
@ -1573,7 +1456,6 @@ void MediaPlayerService::AudioOutput::pause()
{
LOGV("pause");
if (mTrack) mTrack->pause();
lastWriteTime = 0;
}
void MediaPlayerService::AudioOutput::close()
@ -1609,9 +1491,6 @@ void MediaPlayerService::AudioOutput::CallbackWrapper(
buffer->size = actualSize;
if (actualSize > 0) {
me->snoopWrite(buffer->raw, actualSize);
}
}
#undef LOG_TAG

View File

@ -113,9 +113,6 @@ class MediaPlayerService : public BnMediaPlayerService
static bool mIsOnEmulator;
static int mMinBufferCount; // 12 for emulator; otherwise 4
public: // visualization hack support
uint32_t mNumFramesWritten;
void snoopWrite(const void*, size_t);
};
class AudioCache : public MediaPlayerBase::AudioSink
@ -191,7 +188,6 @@ public:
virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length, int audioSessionId);
virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
virtual sp<IMemory> snoop();
virtual sp<IOMX> getOMX();
virtual status_t dump(int fd, const Vector<String16>& args);