/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.net.rtp; import java.util.HashMap; import java.util.Map; /** * An AudioGroup acts as a router connected to the speaker, the microphone, and * {@link AudioStream}s. Its execution loop consists of four steps. First, for * each AudioStream not in {@link RtpStream#MODE_SEND_ONLY}, decodes its * incoming packets and stores in its buffer. Then, if the microphone is * enabled, processes the recorded audio and stores in its buffer. Third, if the * speaker is enabled, mixes and playbacks buffers of all AudioStreams. Finally, * for each AudioStream not in {@link RtpStream#MODE_RECEIVE_ONLY}, mixes all * other buffers and sends back the encoded packets. An AudioGroup does nothing * if there is no AudioStream in it. * *

A few things must be noticed before using these classes. The performance is * highly related to the system load and the network bandwidth. Usually a * simpler {@link AudioCodec} costs fewer CPU cycles but requires more network * bandwidth, and vice versa. Using two AudioStreams at the same time not only * doubles the load but also the bandwidth. The condition varies from one device * to another, and developers must choose the right combination in order to get * the best result. * *

It is sometimes useful to keep multiple AudioGroups at the same time. For * example, a Voice over IP (VoIP) application might want to put a conference * call on hold in order to make a new call but still allow people in the * previous call to talk to each other. This can be done easily using two * AudioGroups, but there are some limitations. Since the speaker and the * microphone are shared globally, only one AudioGroup is allowed to run in * modes other than {@link #MODE_ON_HOLD}. In addition, before adding an * AudioStream into an AudioGroup, one should always put all other AudioGroups * into {@link #MODE_ON_HOLD}. That will make sure the audio driver is correctly * initialized.

* *

Using this class requires * {@link android.Manifest.permission#RECORD_AUDIO} permission.

* * @see AudioStream * @hide */ public class AudioGroup { /** * This mode is similar to {@link #MODE_NORMAL} except the speaker and * the microphone are disabled. */ public static final int MODE_ON_HOLD = 0; /** * This mode is similar to {@link #MODE_NORMAL} except the microphone is * muted. */ public static final int MODE_MUTED = 1; /** * This mode indicates that the speaker, the microphone, and all * {@link AudioStream}s in the group are enabled. First, the packets * received from the streams are decoded and mixed with the audio recorded * from the microphone. Then, the results are played back to the speaker, * encoded and sent back to each stream. */ public static final int MODE_NORMAL = 2; /** * This mode is similar to {@link #MODE_NORMAL} except the echo suppression * is enabled. It should be only used when the speaker phone is on. */ public static final int MODE_ECHO_SUPPRESSION = 3; private static final int MODE_LAST = 3; private final Map mStreams; private int mMode = MODE_ON_HOLD; private int mNative; static { System.loadLibrary("rtp_jni"); } /** * Creates an empty AudioGroup. */ public AudioGroup() { mStreams = new HashMap(); } /** * Returns the {@link AudioStream}s in this group. */ public AudioStream[] getStreams() { synchronized (this) { return mStreams.keySet().toArray(new AudioStream[mStreams.size()]); } } /** * Returns the current mode. */ public int getMode() { return mMode; } /** * Changes the current mode. It must be one of {@link #MODE_ON_HOLD}, * {@link #MODE_MUTED}, {@link #MODE_NORMAL}, and * {@link #MODE_ECHO_SUPPRESSION}. * * @param mode The mode to change to. * @throws IllegalArgumentException if the mode is invalid. */ public void setMode(int mode) { if (mode < 0 || mode > MODE_LAST) { throw new IllegalArgumentException("Invalid mode"); } synchronized (this) { nativeSetMode(mode); mMode = mode; } } private native void nativeSetMode(int mode); // Package-private method used by AudioStream.join(). 
void add(AudioStream stream, AudioCodec codec, int dtmfType) { synchronized (this) { if (!mStreams.containsKey(stream)) { try { int socket = stream.dup(); String codecSpec = String.format("%d %s %s", codec.type, codec.rtpmap, codec.fmtp); nativeAdd(stream.getMode(), socket, stream.getRemoteAddress().getHostAddress(), stream.getRemotePort(), codecSpec, dtmfType); mStreams.put(stream, socket); } catch (NullPointerException e) { throw new IllegalStateException(e); } } } } private native void nativeAdd(int mode, int socket, String remoteAddress, int remotePort, String codecSpec, int dtmfType); // Package-private method used by AudioStream.join(). void remove(AudioStream stream) { synchronized (this) { Integer socket = mStreams.remove(stream); if (socket != null) { nativeRemove(socket); } } } private native void nativeRemove(int socket); /** * Sends a DTMF digit to every {@link AudioStream} in this group. Currently * only event {@code 0} to {@code 15} are supported. * * @throws IllegalArgumentException if the event is invalid. */ public void sendDtmf(int event) { if (event < 0 || event > 15) { throw new IllegalArgumentException("Invalid event"); } synchronized (this) { nativeSendDtmf(event); } } private native void nativeSendDtmf(int event); /** * Removes every {@link AudioStream} in this group. */ public void clear() { synchronized (this) { mStreams.clear(); nativeRemove(-1); } } @Override protected void finalize() throws Throwable { clear(); super.finalize(); } }