sim.android.media.AudioTrack Maven / Gradle / Ivy
The newest version!
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sim.android.media;
import android.media.AudioFormat;
import android.media.AudioManager;
import java.lang.ref.WeakReference;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import phat.mobile.servicemanager.client.ServiceManagerRemote;
import phat.mobile.servicemanager.services.Service;
public class AudioTrack {
//---------------------------------------------------------
// Constants
//--------------------
/**
* Minimum value for a channel volume
*/
private static final float VOLUME_MIN = 0.0f;
/**
* Maximum value for a channel volume
*/
private static final float VOLUME_MAX = 1.0f;
/**
* Minimum value for sample rate
*/
private static final int SAMPLE_RATE_HZ_MIN = 4000;
/**
* Maximum value for sample rate
*/
private static final int SAMPLE_RATE_HZ_MAX = 48000;
/**
* indicates AudioTrack state is stopped
*/
public static final int PLAYSTATE_STOPPED = 1; // matches SL_PLAYSTATE_STOPPED
/**
* indicates AudioTrack state is paused
*/
public static final int PLAYSTATE_PAUSED = 2; // matches SL_PLAYSTATE_PAUSED
/**
* indicates AudioTrack state is playing
*/
public static final int PLAYSTATE_PLAYING = 3; // matches SL_PLAYSTATE_PLAYING
// keep these values in sync with android_media_AudioTrack.cpp
/**
* Creation mode where audio data is transferred from Java to the native
* layer only once before the audio starts playing.
*/
public static final int MODE_STATIC = 0;
/**
* Creation mode where audio data is streamed from Java to the native layer
* as the audio is playing.
*/
public static final int MODE_STREAM = 1;
/**
* State of an AudioTrack that was not successfully initialized upon
* creation.
*/
public static final int STATE_UNINITIALIZED = 0;
/**
* State of an AudioTrack that is ready to be used.
*/
public static final int STATE_INITIALIZED = 1;
/**
* State of a successfully initialized AudioTrack that uses static data, but
* that hasn't received that data yet.
*/
public static final int STATE_NO_STATIC_DATA = 2;
// Error codes:
// to keep in sync with frameworks/base/core/jni/android_media_AudioTrack.cpp
/**
* Denotes a successful operation.
*/
public static final int SUCCESS = 0;
/**
* Denotes a generic operation failure.
*/
public static final int ERROR = -1;
/**
* Denotes a failure due to the use of an invalid value.
*/
public static final int ERROR_BAD_VALUE = -2;
/**
* Denotes a failure due to the improper use of a method.
*/
public static final int ERROR_INVALID_OPERATION = -3;
private static final int ERROR_NATIVESETUP_AUDIOSYSTEM = -16;
private static final int ERROR_NATIVESETUP_INVALIDCHANNELMASK = -17;
private static final int ERROR_NATIVESETUP_INVALIDFORMAT = -18;
private static final int ERROR_NATIVESETUP_INVALIDSTREAMTYPE = -19;
private static final int ERROR_NATIVESETUP_NATIVEINITFAILED = -20;
// Events:
// to keep in sync with frameworks/av/include/media/AudioTrack.h
/**
* Event id denotes when playback head has reached a previously set marker.
*/
private static final int NATIVE_EVENT_MARKER = 3;
/**
* Event id denotes when previously set update period has elapsed during
* playback.
*/
private static final int NATIVE_EVENT_NEW_POS = 4;
private final static String TAG = "android.media.AudioTrack";
//--------------------------------------------------------------------------
// Member variables
//--------------------
/**
* Indicates the state of the AudioTrack instance.
*/
private int mState = STATE_UNINITIALIZED;
/**
* Indicates the play state of the AudioTrack instance.
*/
private int mPlayState = PLAYSTATE_STOPPED;
/**
* Lock to make sure mPlayState updates are reflecting the actual state of
* the object.
*/
private final Object mPlayStateLock = new Object();
/**
* Sizes of the native audio buffer.
*/
private int mNativeBufferSizeInBytes = 0;
private int mNativeBufferSizeInFrames = 0;
/**
* Handler for events coming from the native code.
*/
private NativeEventHandlerDelegate mEventHandlerDelegate;
/**
* Looper associated with the thread that creates the AudioTrack instance.
*/
private final Looper mInitializationLooper;
/**
* The audio data source sampling rate in Hz.
*/
private int mSampleRate; // initialized by all constructors
/**
* The number of audio output channels (1 is mono, 2 is stereo).
*/
private int mChannelCount = 1;
/**
* The audio channel mask.
*/
private int mChannels = AudioFormat.CHANNEL_OUT_MONO;
private int mStreamType = AudioManager.STREAM_MUSIC;
/**
* The way audio is consumed by the audio sink, streaming or static.
*/
private int mDataLoadMode = MODE_STREAM;
/**
* The current audio channel configuration.
*/
private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
/**
* The encoding of the audio samples.
*
* see AudioFormat#ENCODING_PCM_8BIT
* see AudioFormat#ENCODING_PCM_16BIT
*/
private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
/**
* Audio session ID
*/
private int mSessionId = 0;
//--------------------------------
// Used exclusively by native code
//--------------------
/**
* Accessed by native methods: provides access to C++ AudioTrack object.
*/
@SuppressWarnings("unused")
private int mNativeTrackInJavaObj;
/**
* Accessed by native methods: provides access to the JNI data (i.e.
* resources used by the native AudioTrack object, but not stored in it).
*/
@SuppressWarnings("unused")
private int mJniData;
public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
int bufferSizeInBytes, int mode) // throws IllegalArgumentException
{
this(streamType, sampleRateInHz, channelConfig, audioFormat,
bufferSizeInBytes, mode, 0 /*session*/);
}
AudioTrackClient audioTrackClient;
public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
int bufferSizeInBytes, int mode, int sessionId) // throws IllegalArgumentException
{
if (audioTrackClient == null) {
Log.d(TAG, "ServiceManagerRemote().getInstance()");
ServiceManagerRemote smr = ServiceManagerRemote.getInstance();
Log.d(TAG, "ServiceManagerRemote: " + smr);
Service service = smr.getService(null, Service.SPEAKER);
Log.d(TAG, "Service: " + service);
if (service != null) {
Log.d(TAG, "service: " + service.toString());
audioTrackClient = new AudioTrackClient(this, bufferSizeInBytes, service.getIp(), service.getPort());
Log.d(TAG, "audioRecordClient: " + service.getIp() + ":" + service.getPort());
}
}
// mState already == STATE_UNINITIALIZED
// remember which looper is associated with the AudioTrack instantiation
Looper looper;
if ((looper = Looper.myLooper()) == null) {
looper = Looper.getMainLooper();
}
mInitializationLooper = looper;
}
// mask of all the channels supported by this implementation
private static final int SUPPORTED_OUT_CHANNELS
= AudioFormat.CHANNEL_OUT_FRONT_LEFT
| AudioFormat.CHANNEL_OUT_FRONT_RIGHT
| AudioFormat.CHANNEL_OUT_FRONT_CENTER
| AudioFormat.CHANNEL_OUT_LOW_FREQUENCY
| AudioFormat.CHANNEL_OUT_BACK_LEFT
| AudioFormat.CHANNEL_OUT_BACK_RIGHT
| AudioFormat.CHANNEL_OUT_BACK_CENTER;
private void audioParamCheck(int streamType, int sampleRateInHz,
int channelConfig, int audioFormat, int mode) {
//--------------
// stream type
if ((streamType != AudioManager.STREAM_ALARM) && (streamType != AudioManager.STREAM_MUSIC)
&& (streamType != AudioManager.STREAM_RING) && (streamType != AudioManager.STREAM_SYSTEM)
&& (streamType != AudioManager.STREAM_VOICE_CALL)
&& (streamType != AudioManager.STREAM_NOTIFICATION)
// TODO no compile && (streamType != AudioManager.STREAM_BLUETOOTH_SCO)
&& (streamType != AudioManager.STREAM_DTMF)) {
throw new IllegalArgumentException("Invalid stream type.");
}
mStreamType = streamType;
//--------------
// sample rate, note these values are subject to change
if ((sampleRateInHz < 4000) || (sampleRateInHz > 48000)) {
throw new IllegalArgumentException(sampleRateInHz
+ "Hz is not a supported sample rate.");
}
mSampleRate = sampleRateInHz;
//--------------
// channel config
mChannelConfiguration = channelConfig;
switch (channelConfig) {
case AudioFormat.CHANNEL_OUT_DEFAULT: //AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
case AudioFormat.CHANNEL_OUT_MONO:
case AudioFormat.CHANNEL_CONFIGURATION_MONO:
mChannelCount = 1;
mChannels = AudioFormat.CHANNEL_OUT_MONO;
break;
case AudioFormat.CHANNEL_OUT_STEREO:
case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
mChannelCount = 2;
mChannels = AudioFormat.CHANNEL_OUT_STEREO;
break;
default:
if (!isMultichannelConfigSupported(channelConfig)) {
// input channel configuration features unsupported channels
throw new IllegalArgumentException("Unsupported channel configuration.");
}
mChannels = channelConfig;
mChannelCount = Integer.bitCount(channelConfig);
}
//--------------
// audio format
switch (audioFormat) {
case AudioFormat.ENCODING_DEFAULT:
mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
break;
case AudioFormat.ENCODING_PCM_16BIT:
case AudioFormat.ENCODING_PCM_8BIT:
mAudioFormat = audioFormat;
break;
default:
throw new IllegalArgumentException("Unsupported sample encoding."
+ " Should be ENCODING_PCM_8BIT or ENCODING_PCM_16BIT.");
}
//--------------
// audio load mode
if ((mode != MODE_STREAM) && (mode != MODE_STATIC)) {
throw new IllegalArgumentException("Invalid mode.");
}
mDataLoadMode = mode;
}
private static boolean isMultichannelConfigSupported(int channelConfig) {
// check for unsupported channels
if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
loge("Channel configuration features unsupported channels");
return false;
}
// check for unsupported multichannel combinations:
// - FL/FR must be present
// - L/R channels must be paired (e.g. no single L channel)
final int frontPair
= AudioFormat.CHANNEL_OUT_FRONT_LEFT | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
if ((channelConfig & frontPair) != frontPair) {
loge("Front channels must be present in multichannel configurations");
return false;
}
final int backPair
= AudioFormat.CHANNEL_OUT_BACK_LEFT | AudioFormat.CHANNEL_OUT_BACK_RIGHT;
if ((channelConfig & backPair) != 0) {
if ((channelConfig & backPair) != backPair) {
loge("Rear channels can't be used independently");
return false;
}
}
return true;
}
private void audioBuffSizeCheck(int audioBufferSize) {
// NB: this section is only valid with PCM data.
// To update when supporting compressed formats
int frameSizeInBytes = mChannelCount
* (mAudioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
if ((audioBufferSize % frameSizeInBytes != 0) || (audioBufferSize < 1)) {
throw new IllegalArgumentException("Invalid audio buffer size.");
}
mNativeBufferSizeInBytes = audioBufferSize;
mNativeBufferSizeInFrames = audioBufferSize / frameSizeInBytes;
}
/**
* Releases the native AudioTrack resources.
*/
public void release() {
// even though native_release() stops the native AudioTrack, we need to stop
// AudioTrack subclasses too.
try {
stop();
} catch (IllegalStateException ise) {
// don't raise an exception, we're releasing the resources.
}
native_release();
mState = STATE_UNINITIALIZED;
}
@Override
protected void finalize() {
native_finalize();
}
static public float getMinVolume() {
return VOLUME_MIN;
}
static public float getMaxVolume() {
return VOLUME_MAX;
}
public int getSampleRate() {
return mSampleRate;
}
public int getPlaybackRate() {
return native_get_playback_rate();
}
public int getAudioFormat() {
return mAudioFormat;
}
public int getStreamType() {
return mStreamType;
}
public int getChannelConfiguration() {
return mChannelConfiguration;
}
public int getChannelCount() {
return mChannelCount;
}
public int getState() {
return mState;
}
public int getPlayState() {
synchronized (mPlayStateLock) {
return mPlayState;
}
}
@Deprecated
protected int getNativeFrameCount() {
return native_get_native_frame_count();
}
public int getNotificationMarkerPosition() {
return native_get_marker_pos();
}
public int getPositionNotificationPeriod() {
return native_get_pos_update_period();
}
public int getPlaybackHeadPosition() {
return native_get_position();
}
public int getLatency() {
return native_get_latency();
}
static public int getNativeOutputSampleRate(int streamType) {
return native_get_output_sample_rate(streamType);
}
static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
int channelCount = 0;
switch (channelConfig) {
case AudioFormat.CHANNEL_OUT_MONO:
case AudioFormat.CHANNEL_CONFIGURATION_MONO:
channelCount = 1;
break;
case AudioFormat.CHANNEL_OUT_STEREO:
case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
channelCount = 2;
break;
default:
if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
// input channel configuration features unsupported channels
loge("getMinBufferSize(): Invalid channel configuration.");
return ERROR_BAD_VALUE;
} else {
channelCount = Integer.bitCount(channelConfig);
}
}
if ((audioFormat != AudioFormat.ENCODING_PCM_16BIT)
&& (audioFormat != AudioFormat.ENCODING_PCM_8BIT)) {
loge("getMinBufferSize(): Invalid audio format.");
return ERROR_BAD_VALUE;
}
// sample rate, note these values are subject to change
if ((sampleRateInHz < SAMPLE_RATE_HZ_MIN) || (sampleRateInHz > SAMPLE_RATE_HZ_MAX)) {
loge("getMinBufferSize(): " + sampleRateInHz + " Hz is not a supported sample rate.");
return ERROR_BAD_VALUE;
}
int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);
if (size <= 0) {
loge("getMinBufferSize(): error querying hardware");
return ERROR;
} else {
return size;
}
}
public int getAudioSessionId() {
return mSessionId;
}
public boolean getTimestamp(AudioTimestamp timestamp) {
if (timestamp == null) {
throw new IllegalArgumentException();
}
// It's unfortunate, but we have to either create garbage every time or use synchronized
long[] longArray = new long[2];
int ret = native_get_timestamp(longArray);
if (ret != SUCCESS) {
return false;
}
timestamp.framePosition = longArray[0];
timestamp.nanoTime = longArray[1];
return true;
}
public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener) {
setPlaybackPositionUpdateListener(listener, null);
}
public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener,
Handler handler) {
if (listener != null) {
mEventHandlerDelegate = new NativeEventHandlerDelegate(this, listener, handler);
} else {
mEventHandlerDelegate = null;
}
}
public int setStereoVolume(float leftVolume, float rightVolume) {
if (mState == STATE_UNINITIALIZED) {
return ERROR_INVALID_OPERATION;
}
// clamp the volumes
if (leftVolume < getMinVolume()) {
leftVolume = getMinVolume();
}
if (leftVolume > getMaxVolume()) {
leftVolume = getMaxVolume();
}
if (rightVolume < getMinVolume()) {
rightVolume = getMinVolume();
}
if (rightVolume > getMaxVolume()) {
rightVolume = getMaxVolume();
}
native_setVolume(leftVolume, rightVolume);
return SUCCESS;
}
public int setVolume(float volume) {
return setStereoVolume(volume, volume);
}
public int setPlaybackRate(int sampleRateInHz) {
if (mState != STATE_INITIALIZED) {
return ERROR_INVALID_OPERATION;
}
if (sampleRateInHz <= 0) {
return ERROR_BAD_VALUE;
}
return native_set_playback_rate(sampleRateInHz);
}
public int setNotificationMarkerPosition(int markerInFrames) {
if (mState == STATE_UNINITIALIZED) {
return ERROR_INVALID_OPERATION;
}
return native_set_marker_pos(markerInFrames);
}
public int setPositionNotificationPeriod(int periodInFrames) {
if (mState == STATE_UNINITIALIZED) {
return ERROR_INVALID_OPERATION;
}
return native_set_pos_update_period(periodInFrames);
}
public int setPlaybackHeadPosition(int positionInFrames) {
if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED
|| getPlayState() == PLAYSTATE_PLAYING) {
return ERROR_INVALID_OPERATION;
}
if (!(0 <= positionInFrames && positionInFrames <= mNativeBufferSizeInFrames)) {
return ERROR_BAD_VALUE;
}
return native_set_position(positionInFrames);
}
public int setLoopPoints(int startInFrames, int endInFrames, int loopCount) {
if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED
|| getPlayState() == PLAYSTATE_PLAYING) {
return ERROR_INVALID_OPERATION;
}
if (loopCount == 0) {
; // explicitly allowed as an exception to the loop region range check
} else if (!(0 <= startInFrames && startInFrames < mNativeBufferSizeInFrames
&& startInFrames < endInFrames && endInFrames <= mNativeBufferSizeInFrames)) {
return ERROR_BAD_VALUE;
}
return native_set_loop(startInFrames, endInFrames, loopCount);
}
@Deprecated
protected void setState(int state) {
mState = state;
}
public void play() {
synchronized (mPlayStateLock) {
//native_start();
audioTrackClient.start();
mPlayState = PLAYSTATE_PLAYING;
}
}
public void stop() {
// stop playing
synchronized (mPlayStateLock) {
//native_stop();
audioTrackClient.stop();
mPlayState = PLAYSTATE_STOPPED;
}
}
public void pause() {
//logd("pause()");
// pause playback
synchronized (mPlayStateLock) {
native_pause();
mPlayState = PLAYSTATE_PAUSED;
}
}
public void flush() {
if (mState == STATE_INITIALIZED) {
// flush the data in native layer
native_flush();
}
}
public int write(byte[] audioData, int offsetInBytes, int sizeInBytes) {
int ret = native_write_byte(audioData, offsetInBytes, sizeInBytes, mAudioFormat);
return ret;
}
public int write(short[] audioData, int offsetInShorts, int sizeInShorts) {
if (mState == STATE_UNINITIALIZED) {
return ERROR_INVALID_OPERATION;
}
if ((audioData == null) || (offsetInShorts < 0) || (sizeInShorts < 0)
|| (offsetInShorts + sizeInShorts < 0) // detect integer overflow
|| (offsetInShorts + sizeInShorts > audioData.length)) {
return ERROR_BAD_VALUE;
}
int ret = native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat);
if ((mDataLoadMode == MODE_STATIC)
&& (mState == STATE_NO_STATIC_DATA)
&& (ret > 0)) {
// benign race with respect to other APIs that read mState
mState = STATE_INITIALIZED;
}
return ret;
}
public int reloadStaticData() {
if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED) {
return ERROR_INVALID_OPERATION;
}
return native_reload_static();
}
public int attachAuxEffect(int effectId) {
if (mState == STATE_UNINITIALIZED) {
return ERROR_INVALID_OPERATION;
}
return native_attachAuxEffect(effectId);
}
public int setAuxEffectSendLevel(float level) {
if (mState == STATE_UNINITIALIZED) {
return ERROR_INVALID_OPERATION;
}
// clamp the level
if (level < getMinVolume()) {
level = getMinVolume();
}
if (level > getMaxVolume()) {
level = getMaxVolume();
}
native_setAuxEffectSendLevel(level);
return SUCCESS;
}
public interface OnPlaybackPositionUpdateListener {
void onMarkerReached(AudioTrack track);
void onPeriodicNotification(AudioTrack track);
}
private class NativeEventHandlerDelegate {
private final Handler mHandler;
NativeEventHandlerDelegate(final AudioTrack track,
final OnPlaybackPositionUpdateListener listener,
Handler handler) {
// find the looper for our new event handler
Looper looper;
if (handler != null) {
looper = handler.getLooper();
} else {
// no given handler, use the looper the AudioTrack was created in
looper = mInitializationLooper;
}
// construct the event handler with this looper
if (looper != null) {
// implement the event handler delegate
mHandler = new Handler(looper) {
@Override
public void handleMessage(Message msg) {
if (track == null) {
return;
}
switch (msg.what) {
case NATIVE_EVENT_MARKER:
if (listener != null) {
listener.onMarkerReached(track);
}
break;
case NATIVE_EVENT_NEW_POS:
if (listener != null) {
listener.onPeriodicNotification(track);
}
break;
default:
loge("Unknown native event type: " + msg.what);
break;
}
}
};
} else {
mHandler = null;
}
}
Handler getHandler() {
return mHandler;
}
}
//---------------------------------------------------------
// Java methods called from the native side
//--------------------
@SuppressWarnings("unused")
private static void postEventFromNative(Object audiotrack_ref,
int what, int arg1, int arg2, Object obj) {
//logd("Event posted from the native side: event="+ what + " args="+ arg1+" "+arg2);
AudioTrack track = (AudioTrack) ((WeakReference) audiotrack_ref).get();
if (track == null) {
return;
}
NativeEventHandlerDelegate delegate = track.mEventHandlerDelegate;
if (delegate != null) {
Handler handler = delegate.getHandler();
if (handler != null) {
Message m = handler.obtainMessage(what, arg1, arg2, obj);
handler.sendMessage(m);
}
}
}
//---------------------------------------------------------
// Native methods called from the Java side
//--------------------
private final int native_setup(Object audiotrack_this,
int streamType, int sampleRate, int nbChannels, int audioFormat,
int buffSizeInBytes, int mode, int[] sessionId) {
return 0;
}
private final void native_finalize() {
}
private final void native_release() {
}
private final void native_start() {
}
private final void native_stop() {
}
private final void native_pause() {
}
private final void native_flush() {
try {
audioTrackClient.flush();
} catch (IOException ex) {
Logger.getLogger(AudioTrack.class.getName()).log(Level.SEVERE, null, ex);
}
}
private final int native_write_byte(byte[] audioData,
int offsetInBytes, int sizeInBytes, int format) {
return 0;
}
private final int native_write_short(short[] audioData,
int offsetInShorts, int sizeInShorts, int format) {
return 0;
}
private final int native_reload_static() {
return 0;
}
private final int native_get_native_frame_count() {
return 0;
}
private final void native_setVolume(float leftVolume, float rightVolume) {
}
private final int native_set_playback_rate(int sampleRateInHz) {
return 0;
}
private final int native_get_playback_rate() {
return 0;
}
private final int native_set_marker_pos(int marker) {
return 0;
}
private final int native_get_marker_pos() {
return 0;
}
private final int native_set_pos_update_period(int updatePeriod) {
return 0;
}
private final int native_get_pos_update_period() {
return 0;
}
private final int native_set_position(int position) {
return 0;
}
private final int native_get_position() {
return 0;
}
private final int native_get_latency() {
return 0;
}
// longArray must be a non-null array of length >= 2
// [0] is assigned the frame position
// [1] is assigned the time in CLOCK_MONOTONIC nanoseconds
private native final int native_get_timestamp(long[] longArray);
private native final int native_set_loop(int start, int end, int loopCount);
static private native final int native_get_output_sample_rate(int streamType);
static private native final int native_get_min_buff_size(
int sampleRateInHz, int channelConfig, int audioFormat);
private native final int native_attachAuxEffect(int effectId);
private native final void native_setAuxEffectSendLevel(float level);
//---------------------------------------------------------
// Utility methods
//------------------
private static void logd(String msg) {
Log.d(TAG, msg);
}
private static void loge(String msg) {
Log.e(TAG, msg);
}
}