Saturday, March 16, 2013

AV Sync Logic





AV Sync Logic in Android Multimedia Framework

Audio playback is real time and depends on the sampling frequency. Audio is used as the master, or reference, clock for the audio-video synchronization logic. AV sync is managed by the AwesomePlayer method onVideoEvent: a video frame is dropped if it was decoded later than a threshold delay behind the audio; the video event is re-posted to read a newly decoded frame if the frame was decoded ahead of the audio threshold; otherwise, if the video decode time lies within the audio threshold, the frame is rendered through the AwesomePlayer renderer.

The AwesomePlayer class uses the TimeSource class to obtain the actual rendering time. If no audio track is present, the TimeSource API uses the system clock (GetSystemTimeUs). If an audio track is available, the TimeSource API is implemented by AudioPlayer, which provides the audio timestamp (latency data) captured from AudioFlinger / the audio HAL.

// Callback invoked by the audio sink when it needs more PCM data.
// (Body elided in this excerpt.) While refilling the sink, the media
// timestamp of the buffer being handed to the hardware is captured into
// mPositionTimeMediaUs, which getMediaTimeUs() later uses as its base.
AudioPlayer::AudioSinkCallback() {
…….
                       // Record the media time (kKeyTime) of the input buffer
                       // currently being pushed to the audio hardware.
                       mInputBuffer->meta_data()->findInt64(kKeyTime, &mPositionTimeMediaUs));
……….
}

// Returns the current audio media time in microseconds.
// Starting from the media time recorded in the sink callback
// (mPositionTimeMediaUs), it adds how far playback has actually advanced
// since that mapping was taken: frames played converted to microseconds,
// minus the sink/output latency (mLatencyUs), minus the real time at
// which the mapping was captured (mPositionTimeRealUs).
int64_t AudioPlayer::getMediaTimeUs() {
……..
          // Elapsed real time since the media/real mapping point,
          // corrected for the audio output latency.
          int64_t realTimeOffset = -mLatencyUs + (mNumFramesPlayed * 1000000) / mSampleRate -    mPositionTimeRealUs;
           if (realTimeOffset < 0) {
               // Clamp: the reported time must never run backwards past
               // the mapping point (latency can exceed frames played early on).
               realTimeOffset = 0;
              }
           return mPositionTimeMediaUs + realTimeOffset;
}

status_t AwesomePlayer::getPosition(int64_t *positionUs) {
                                        if (mSeeking != NO_SEEK) {
                                        *positionUs = mSeekTimeUs;
                                       } else if (mVideoSource != NULL && (mAudioPlayer == NULL || !(mFlags & VIDEO_AT_EOS))) {
                                       *positionUs = mVideoTimeUs;
                                       } else if (mAudioPlayer != NULL) {
                                        *positionUs = mAudioPlayer->getMediaTimeUs();
                                       } else {
                                          *positionUs = 0;
                                      }
}
 

// Starts/resumes playback (body elided in this excerpt).
// The part relevant to AV sync: on first play, when an audio sink exists,
// an AudioPlayer is created and installed as the TimeSource, making the
// audio clock the master reference for synchronization.
status_t AwesomePlayer::play_l() {
……………………..
if (mAudioPlayer == NULL) {
       if (mAudioSink != NULL) {
            // Audio track present: the AudioPlayer becomes the reference
            // clock used by onVideoEvent().
            mAudioPlayer = new AudioPlayer(mAudioSink, this);
            mAudioPlayer->setSource(mAudioSource);
            mTimeSource = mAudioPlayer;
       }
   }
}

// Core AV-sync decision point, run for each decoded video frame.
// Compares the frame's media timestamp against the current reference
// clock (audio when available, otherwise the system clock) and decides
// to drop, delay, or render the frame.
void AwesomePlayer::onVideoEvent() {
         // Pull the next decoded frame and its media timestamp.
         status_t err = mVideoSource->read(&mVideoBuffer, &options);
         mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs);
         // Pick the reference clock: fall back to the system clock when
         // audio has ended or the audio player has not started yet.
         TimeSource *ts = ((mFlags & AUDIO_AT_EOS) || !(mFlags & AUDIOPLAYER_STARTED)) ? &mSystemTimeSource : mTimeSource;
         int64_t realTimeUs, mediaTimeUs;
         if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL &&                    mAudioPlayer->getMediaTimeMapping(&realTimeUs,
&mediaTimeUs)) {
          // Offset between the time source's real time and media time;
          // used to translate the clock into the media timeline.
          mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
          }
         // Current position on the media timeline.
         int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
         // Positive: the frame is late relative to audio. Negative: early.
         int64_t latenessUs = nowUs - timeUs;
         if (latenessUs > 40000) { // We're more than 40ms late.
             // Drop the frame and immediately schedule a read of the next one.
             mVideoBuffer->release(); mVideoBuffer = NULL;
             postVideoEvent_l(); 
             return; 
         }
         if (latenessUs < -10000) { // We're more than 10ms early.
             // Too early to show: retry this event after 10ms.
             postVideoEvent_l(10000); return;
         }
         // Within the sync window: render the frame.
         if (mVideoRenderer != NULL)
          mVideoRenderer->render(mVideoBuffer);
}

// Fallback when no audio track exists: use the wall-clock based
// system time source as the reference clock instead of the AudioPlayer.
if (mTimeSource == NULL && mAudioPlayer == NULL) {
     mTimeSource = &mSystemTimeSource;
}
// Microseconds of wall-clock time elapsed since this time source
// was started (mStartTimeUs is the start-time snapshot).
int64_t SystemTimeSource::getRealTimeUs() {
    const int64_t nowUs = GetSystemTimeUs();
    return nowUs - mStartTimeUs;
}

int64_t SystemTimeSource::GetSystemTimeUs() {
     struct timeval tv;
     gettimeofday(&tv, NULL);
      return (int64_t)tv.tv_sec * 1000000 + tv.tv_usec;
}

No comments:

Post a Comment