Having already looked at the play, stop and flush functions, today let's take a look at the pause function.
*****************************************Source code*************************************************
//Test case 4: getPlaybackHeadPosition() is > 0 after play(); pause();
@LargeTest
public void testPlaybackHeadPositionAfterPause() throws Exception {
// constants for test
final String TEST_NAME = "testPlaybackHeadPositionAfterPause";
final int TEST_SR = 22050;
final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
//-------- initialization --------------
int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
minBuffSize, TEST_MODE);
byte data[] = new byte[minBuffSize/2];
//-------- test --------------
assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
track.write(data, 0, data.length);
track.write(data, 0, data.length);
track.play();
Thread.sleep(100);
track.pause();
int pos = track.getPlaybackHeadPosition();
log(TEST_NAME, "position ="+ pos);
assertTrue(TEST_NAME, pos > 0);
//-------- tear down --------------
track.release();
}
**********************************************************************************************
Source path:
frameworks/base/media/tests/mediaframeworktest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java
#######################Walkthrough################################
//Test case 4: getPlaybackHeadPosition() is > 0 after play(); pause();
@LargeTest
public void testPlaybackHeadPositionAfterPause() throws Exception {
// constants for test
final String TEST_NAME = "testPlaybackHeadPositionAfterPause";
final int TEST_SR = 22050;
final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
final int TEST_MODE = AudioTrack.MODE_STREAM;
final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
//-------- initialization --------------
int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
minBuffSize, TEST_MODE);
byte data[] = new byte[minBuffSize/2];
//-------- test --------------
assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
track.write(data, 0, data.length);
track.write(data, 0, data.length);
track.play();
Thread.sleep(100);
track.pause();
++++++++++++++++++++++++++++++pause++++++++++++++++++++++++++++++++++
/**
* Pauses the playback of the audio data.
* @throws IllegalStateException
*/
public void pause()
throws IllegalStateException {
if (mState != STATE_INITIALIZED) {
throw(new IllegalStateException("pause() called on uninitialized AudioTrack."));
}
//logd("pause()");
// pause playback
synchronized(mPlayStateLock) {
native_pause();
+++++++++++++++++++++++++++++++android_media_AudioTrack_pause+++++++++++++++++++++++++++++++++
static void
android_media_AudioTrack_pause(JNIEnv *env, jobject thiz)
{
AudioTrack *lpTrack = (AudioTrack *)env->GetIntField(
thiz, javaAudioTrackFields.nativeTrackInJavaObj);
if (lpTrack == NULL ) {
jniThrowException(env, "java/lang/IllegalStateException",
"Unable to retrieve AudioTrack pointer for pause()");
return;
}
lpTrack->pause();
++++++++++++++++++++++++++++++AudioTrack::pause++++++++++++++++++++++++++++++++++
void AudioTrack::pause()
{
LOGV("pause");
if (android_atomic_and(~1, &mActive) == 1) {
// mAudioTrack is assigned in AudioTrack::createTrack(); what it ultimately points to is a TrackHandle object
mAudioTrack->pause();
++++++++++++++++++++++++++++AudioFlinger::TrackHandle::pause++++++++++++++++++++++++++++++++++++
void AudioFlinger::TrackHandle::pause() {
mTrack->pause();
++++++++++++++++++++++++++++AudioFlinger::PlaybackThread::Track::pause++++++++++++++++++++++++++++++++++++
void AudioFlinger::PlaybackThread::Track::pause()
{
LOGV("pause(%d), calling thread %d", mName, IPCThreadState::self()->getCallingPid());
sp<ThreadBase> thread = mThread.promote();
if (thread != 0) {
Mutex::Autolock _l(thread->mLock);
if (mState == ACTIVE || mState == RESUMING) {
mState = PAUSING;
LOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get());
if (!isOutputTrack()) {
++++++++++++++++++++++++++++isOutputTrack++++++++++++++++++++++++++++++++++++
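// OutputTrack is the track type AudioFlinger creates internally to feed a duplicating
// output thread; it is tagged with NUM_STREAM_TYPES as its stream type. Only real
// application tracks therefore fall through to the AudioSystem::stopOutput() call below.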
bool isOutputTrack() const {
return (mStreamType == AudioSystem::NUM_STREAM_TYPES);
}
----------------------------isOutputTrack------------------------------------
thread->mLock.unlock();
// AudioSystem::stopOutput() was already covered when we walked through the stop code
AudioSystem::stopOutput(thread->id(),
(AudioSystem::stream_type)mStreamType,
mSessionId);
thread->mLock.lock();
}
}
}
}
----------------------------AudioFlinger::PlaybackThread::Track::pause------------------------------------
}
----------------------------AudioFlinger::TrackHandle::pause------------------------------------
}
}
------------------------------AudioTrack::pause----------------------------------
}
-------------------------------android_media_AudioTrack_pause---------------------------------
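// back in AudioTrack.pause(): the Java-side play state is updated only after the
// native pause has been issued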
mPlayState = PLAYSTATE_PAUSED;
}
}
------------------------------pause----------------------------------
int pos = track.getPlaybackHeadPosition();
log(TEST_NAME, "position ="+ pos);
assertTrue(TEST_NAME, pos > 0);
//-------- tear down --------------
track.release();
++++++++++++++++++++++++++++++release++++++++++++++++++++++++++++++++++
/**
* Releases the native AudioTrack resources.
*/
public void release() {
// even though native_release() stops the native AudioTrack, we need to stop
// AudioTrack subclasses too.
try {
stop();
} catch(IllegalStateException ise) {
// don't raise an exception, we're releasing the resources.
}
native_release();
+++++++++++++++++++++++++++++android_media_AudioTrack_native_release+++++++++++++++++++++++++++++++++++
static void android_media_AudioTrack_native_release(JNIEnv *env, jobject thiz) {
// do everything a call to finalize would
android_media_AudioTrack_native_finalize(env, thiz);
+++++++++++++++++++++++++++++android_media_AudioTrack_native_finalize+++++++++++++++++++++++++++++++++++
static void android_media_AudioTrack_native_finalize(JNIEnv *env, jobject thiz) {
//LOGV("android_media_AudioTrack_native_finalize jobject: %x\n", (int)thiz);
// delete the AudioTrack object
AudioTrack *lpTrack = (AudioTrack *)env->GetIntField(
thiz, javaAudioTrackFields.nativeTrackInJavaObj);
if (lpTrack) {
//LOGV("deleting lpTrack: %x\n", (int)lpTrack);
lpTrack->stop();
delete lpTrack;
}
// delete the JNI data
AudioTrackJniStorage* pJniStorage = (AudioTrackJniStorage *)env->GetIntField(
thiz, javaAudioTrackFields.jniData);
if (pJniStorage) {
// delete global refs created in native_setup
env->DeleteGlobalRef(pJniStorage->mCallbackData.audioTrack_class);
env->DeleteGlobalRef(pJniStorage->mCallbackData.audioTrack_ref);
//LOGV("deleting pJniStorage: %x\n", (int)pJniStorage);
delete pJniStorage;
}
}
-----------------------------android_media_AudioTrack_native_finalize-----------------------------------
// + reset the native resources in the Java object so any attempt to access
// them after a call to release fails.
env->SetIntField(thiz, javaAudioTrackFields.nativeTrackInJavaObj, 0);
env->SetIntField(thiz, javaAudioTrackFields.jniData, 0);
}
-----------------------------android_media_AudioTrack_native_release-----------------------------------
mState = STATE_UNINITIALIZED;
}
------------------------------release----------------------------------
}
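As the walkthrough shows, release() stops and deletes the native AudioTrack, clears the pointers stored in the Java object, and sets mState to STATE_UNINITIALIZED, so any later state-checked call (play(), pause(), stop(), ...) throws IllegalStateException. A small, purely illustrative guard (the TrackGuard class is hypothetical, not part of the framework):

final class TrackGuard {
    // Demonstrates the consequence of release(): the Java object is still reachable,
    // but its state is STATE_UNINITIALIZED, so the state check in pause() throws.
    static void pauseIfUsable(android.media.AudioTrack track) {
        try {
            track.pause();
        } catch (IllegalStateException ignored) {
            // The track was never initialized, or release() has already been called.
        }
    }
}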
###########################################################
&&&&&&&&&&&&&&&&&&&&&&&總結&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
pause is similar to stop: both end up calling AudioSystem::stopOutput().
The difference is that stop also performs a reset, i.e. it clears the user, server and related fields of the audio_track_cblk_t to 0,
whereas pause does not.
This is why, during playback, pressing pause leaves the progress bar frozen at the current position, while pressing stop sends it back to the beginning.
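A rough, self-contained sketch of that observable difference at the Java API level (illustration only: the exact head-position value after stop() depends on timing and on how much data was still buffered, and the stream parameters simply mirror the test case above):

// Illustration, not a real test: pause() keeps the playback head position,
// while stop() resets the track, so the position eventually reads back as 0.
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.util.Log;

class PauseVsStopSketch {
    static void run() throws InterruptedException {
        final int sr = 22050;
        final int conf = AudioFormat.CHANNEL_OUT_STEREO;
        final int fmt = AudioFormat.ENCODING_PCM_16BIT;
        final int minBuf = AudioTrack.getMinBufferSize(sr, conf, fmt);
        AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sr, conf, fmt,
                minBuf, AudioTrack.MODE_STREAM);
        byte[] silence = new byte[minBuf];

        track.write(silence, 0, silence.length);   // prime the buffer before play()
        track.play();
        Thread.sleep(100);

        track.pause();
        int posAfterPause = track.getPlaybackHeadPosition();   // expected: > 0, position kept

        track.play();                              // resumes from where pause() left off
        Thread.sleep(100);
        track.stop();
        Thread.sleep(1000);                        // give the track time to drain and reset
        int posAfterStop = track.getPlaybackHeadPosition();    // expected: 0, track was reset

        Log.d("PauseVsStopSketch", "after pause=" + posAfterPause + ", after stop=" + posAfterStop);
        track.release();
    }
}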
&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
Excerpted from: Jiang Feng's column (江風的專欄)