From e2bad395e573b62ac1c836b97e37e44f1da59fca Mon Sep 17 00:00:00 2001
From: scrawl
Date: Fri, 24 Oct 2014 18:07:17 +0200
Subject: [PATCH 1/4] Leak fix

---
 apps/openmw/mwgui/windowmanagerimp.cpp | 1 +
 1 file changed, 1 insertion(+)

diff --git a/apps/openmw/mwgui/windowmanagerimp.cpp b/apps/openmw/mwgui/windowmanagerimp.cpp
index f431bc8f1..6ab8b94c5 100644
--- a/apps/openmw/mwgui/windowmanagerimp.cpp
+++ b/apps/openmw/mwgui/windowmanagerimp.cpp
@@ -377,6 +377,7 @@ namespace MWGui
         delete mHitFader;
         delete mWerewolfFader;
         delete mScreenFader;
+        delete mBlindnessFader;
         delete mDebugWindow;

         cleanupGarbage();

From b39d69e98c859a78b813abc70099372763a07bc8 Mon Sep 17 00:00:00 2001
From: scrawl
Date: Fri, 24 Oct 2014 21:19:17 +0200
Subject: [PATCH 2/4] Videoplayer fixes, play/pause & seeking

- Fix rindex overflow
- Fix audio sample size bugs (it was using the decoder's sample_fmt and
  channel count instead of the resampled output settings). We didn't notice
  this before because the OpenAL MovieAudioFactory tries to resample to a
  format of the same byte size.
- Add support for play/pause and seeking controls (not used by cutscenes in
  OpenMW)
- Closing the video when the end of the stream is reached is now handled by
  the user (we may also want to keep the video open and seek back)

The video player now has a standalone demo at
https://github.com/scrawl/ogre-ffmpeg-videoplayer
---
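Editor's note: a minimal sketch of how a caller might drive the new controls, for illustration only (not part of the patch). The function, the key-handling flag and the seek offset are hypothetical; only the Video::VideoPlayer calls come from this change.

    #include "videoplayer.hpp"

    // Illustrative per-frame driver for the controls added in this patch.
    bool updateVideo(Video::VideoPlayer &player, bool togglePauseRequested, double seekOffset)
    {
        if (togglePauseRequested)
        {
            if (player.isPaused())
                player.play();
            else
                player.pause();
        }

        if (seekOffset != 0.0) // e.g. +10.0 / -10.0 seconds from a key press
            player.seek(player.getCurrentTime() + seekOffset); // the seek target is clamped to [0, duration]

        // update() now returns false once the end of the stream is reached;
        // closing (or seeking back) is left to the caller.
        if (!player.update())
        {
            player.close();
            return false;
        }
        return true;
    }

The VideoWidget::update() change below forwards the same return value so the GUI layer can decide when to stop.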
 apps/openmw/mwgui/videowidget.cpp             |   3 +-
 apps/openmw/mwsound/ffmpeg_decoder.cpp        |   6 +-
 apps/openmw/mwsound/movieaudiofactory.cpp     |   4 +-
 extern/ogre-ffmpeg-videoplayer/CMakeLists.txt |  15 +-
 .../ogre-ffmpeg-videoplayer/audiodecoder.cpp  |  40 +++-
 .../ogre-ffmpeg-videoplayer/audiodecoder.hpp  |   5 +-
 .../ogre-ffmpeg-videoplayer/videoplayer.cpp   |  56 ++++-
 .../ogre-ffmpeg-videoplayer/videoplayer.hpp   |  19 +-
 extern/ogre-ffmpeg-videoplayer/videostate.cpp | 216 +++++++++++++++---
 extern/ogre-ffmpeg-videoplayer/videostate.hpp |  33 ++-
 10 files changed, 318 insertions(+), 79 deletions(-)

diff --git a/apps/openmw/mwgui/videowidget.cpp b/apps/openmw/mwgui/videowidget.cpp
index 76fdd5287..f8054925b 100644
--- a/apps/openmw/mwgui/videowidget.cpp
+++ b/apps/openmw/mwgui/videowidget.cpp
@@ -30,8 +30,7 @@ int VideoWidget::getVideoHeight()

 bool VideoWidget::update()
 {
-    mPlayer.update();
-    return mPlayer.isPlaying();
+    return mPlayer.update();
 }

 void VideoWidget::stop()
diff --git a/apps/openmw/mwsound/ffmpeg_decoder.cpp b/apps/openmw/mwsound/ffmpeg_decoder.cpp
index a2998ad03..b086d4aed 100644
--- a/apps/openmw/mwsound/ffmpeg_decoder.cpp
+++ b/apps/openmw/mwsound/ffmpeg_decoder.cpp
@@ -148,7 +148,7 @@ size_t FFmpeg_Decoder::readAVAudioData(void *data, size_t length)
                 break;
             mFramePos = 0;
             mFrameSize = mFrame->nb_samples * (*mStream)->codec->channels *
-                         av_get_bytes_per_sample((*mStream)->codec->sample_fmt);
+                         av_get_bytes_per_sample(mOutputSampleFormat);
         }

         /* Get the amount of bytes remaining to be written, and clamp to
@@ -384,7 +384,7 @@ void FFmpeg_Decoder::readAll(std::vector<char> &output)
     while(getAVAudioData())
     {
         size_t got = mFrame->nb_samples * (*mStream)->codec->channels *
-                     av_get_bytes_per_sample((*mStream)->codec->sample_fmt);
+                     av_get_bytes_per_sample(mOutputSampleFormat);
         const char *inbuf = reinterpret_cast<const char*>(mFrameData[0]);
         output.insert(output.end(), inbuf, inbuf+got);
     }
@@ -403,7 +403,7 @@ void FFmpeg_Decoder::rewind()
 size_t FFmpeg_Decoder::getSampleOffset()
 {
     int delay = (mFrameSize-mFramePos) / (*mStream)->codec->channels /
-                av_get_bytes_per_sample((*mStream)->codec->sample_fmt);
+                av_get_bytes_per_sample(mOutputSampleFormat);
     return (int)(mNextPts*(*mStream)->codec->sample_rate) - delay;
 }
diff --git a/apps/openmw/mwsound/movieaudiofactory.cpp b/apps/openmw/mwsound/movieaudiofactory.cpp
index bc7bb8023..97925c7c5 100644
--- a/apps/openmw/mwsound/movieaudiofactory.cpp
+++ b/apps/openmw/mwsound/movieaudiofactory.cpp
@@ -44,8 +44,8 @@ namespace MWSound

         size_t getSampleOffset()
         {
-            ssize_t clock_delay = (mFrameSize-mFramePos) / mAVStream->codec->channels /
-                                  av_get_bytes_per_sample(mAVStream->codec->sample_fmt);
+            ssize_t clock_delay = (mFrameSize-mFramePos) / av_get_channel_layout_nb_channels(mOutputChannelLayout) /
+                                  av_get_bytes_per_sample(mOutputSampleFormat);
             return (size_t)(mAudioClock*mAVStream->codec->sample_rate) - clock_delay;
         }
diff --git a/extern/ogre-ffmpeg-videoplayer/CMakeLists.txt b/extern/ogre-ffmpeg-videoplayer/CMakeLists.txt
index f34ffa64b..299a57799 100644
--- a/extern/ogre-ffmpeg-videoplayer/CMakeLists.txt
+++ b/extern/ogre-ffmpeg-videoplayer/CMakeLists.txt
@@ -3,15 +3,14 @@ set(OGRE_FFMPEG_VIDEOPLAYER_LIBRARY "ogre-ffmpeg-videoplayer")

 # Sources
 set(OGRE_FFMPEG_VIDEOPLAYER_SOURCE_FILES
-        videoplayer.cpp
-        videostate.cpp
-        videodefs.hpp
+    videoplayer.cpp
+    videostate.cpp
+    videodefs.hpp
     libavwrapper.cpp
     audiodecoder.cpp
     audiofactory.hpp
 )
-
 # Find FFMPEG
 set(FFmpeg_FIND_COMPONENTS AVCODEC AVFORMAT AVUTIL SWSCALE SWRESAMPLE AVRESAMPLE)
 unset(FFMPEG_LIBRARIES CACHE)
@@ -30,10 +29,14 @@ else()
         message(FATAL_ERROR "Install either libswresample (FFmpeg) or libavresample (Libav).")
     endif()
 endif()
-
 include_directories(${FFMPEG_INCLUDE_DIRS})

+# Find Boost
+set(BOOST_COMPONENTS thread)
+find_package(Boost REQUIRED COMPONENTS ${BOOST_COMPONENTS})
+include_directories(${Boost_INCLUDE_DIRS})
+
 add_library(${OGRE_FFMPEG_VIDEOPLAYER_LIBRARY} STATIC ${OGRE_FFMPEG_VIDEOPLAYER_SOURCE_FILES})
-target_link_libraries(${OGRE_FFMPEG_VIDEOPLAYER_LIBRARY} ${VIDEO_FFMPEG_LIBRARIES})
+target_link_libraries(${OGRE_FFMPEG_VIDEOPLAYER_LIBRARY} ${VIDEO_FFMPEG_LIBRARIES} ${Boost_LIBRARIES})

 link_directories(${CMAKE_CURRENT_BINARY_DIR})
diff --git a/extern/ogre-ffmpeg-videoplayer/audiodecoder.cpp b/extern/ogre-ffmpeg-videoplayer/audiodecoder.cpp
index 54fe2b24f..41313d5d5 100644
--- a/extern/ogre-ffmpeg-videoplayer/audiodecoder.cpp
+++ b/extern/ogre-ffmpeg-videoplayer/audiodecoder.cpp
@@ -152,8 +152,8 @@ int MovieAudioDecoder::synchronize_audio()
         double avg_diff = mAudioDiffAccum * (1.0 - mAudioDiffAvgCoef);
         if(fabs(avg_diff) >= mAudioDiffThreshold)
         {
-            int n = av_get_bytes_per_sample(mAVStream->codec->sample_fmt) *
-                    mAVStream->codec->channels;
+            int n = av_get_bytes_per_sample(mOutputSampleFormat) *
+                    av_get_channel_layout_nb_channels(mOutputChannelLayout);
             sample_skip = ((int)(diff * mAVStream->codec->sample_rate) * n);
         }
     }
@@ -161,7 +161,7 @@ int MovieAudioDecoder::synchronize_audio()
     return sample_skip;
 }

-int MovieAudioDecoder::audio_decode_frame(AVFrame *frame)
+int MovieAudioDecoder::audio_decode_frame(AVFrame *frame, int &sample_skip)
 {
     AVPacket *pkt = &mPacket;

@@ -191,7 +191,7 @@ int MovieAudioDecoder::audio_decode_frame(AVFrame *frame)
             if(!mDataBuf || mDataBufLen < frame->nb_samples)
             {
                 av_freep(&mDataBuf);
-                if(av_samples_alloc(&mDataBuf, NULL, mAVStream->codec->channels,
+                if(av_samples_alloc(&mDataBuf, NULL, av_get_channel_layout_nb_channels(mOutputChannelLayout),
                                     frame->nb_samples, mOutputSampleFormat, 0) < 0)
                     break;
                 else
@@ -212,8 +212,8 @@ int MovieAudioDecoder::audio_decode_frame(AVFrame *frame)
                           (double)mAVStream->codec->sample_rate;

             /* We have data, return it and come back for more later */
-            return frame->nb_samples * mAVStream->codec->channels *
-                   av_get_bytes_per_sample(mAVStream->codec->sample_fmt);
+            return frame->nb_samples * av_get_channel_layout_nb_channels(mOutputChannelLayout) *
+                   av_get_bytes_per_sample(mOutputSampleFormat);
         }
         av_free_packet(pkt);

@@ -221,6 +221,18 @@
         if(mVideoState->audioq.get(pkt, mVideoState) < 0)
             return -1;

+        if(pkt->data == mVideoState->mFlushPktData)
+        {
+            avcodec_flush_buffers(mAVStream->codec);
+            mAudioDiffAccum = 0.0;
+            mAudioDiffAvgCount = 0;
+            mAudioClock = av_q2d(mAVStream->time_base)*pkt->pts;
+            sample_skip = 0;
+
+            if(mVideoState->audioq.get(pkt, mVideoState) < 0)
+                return -1;
+        }
+
         /* if update, update the audio clock w/pts */
         if((uint64_t)pkt->pts != AV_NOPTS_VALUE)
             mAudioClock = av_q2d(mAVStream->time_base)*pkt->pts;
@@ -229,6 +241,16 @@

 size_t MovieAudioDecoder::read(char *stream, size_t len)
 {
+    if (mVideoState->mPaused)
+    {
+        // fill the buffer with silence
+        size_t sampleSize = av_get_bytes_per_sample(mOutputSampleFormat);
+        char* data[1];
+        data[0] = stream;
+        av_samples_set_silence((uint8_t**)data, 0, len/sampleSize, 1, mOutputSampleFormat);
+        return len;
+    }
+
     int sample_skip = synchronize_audio();
     size_t total = 0;

@@ -237,7 +259,7 @@ size_t MovieAudioDecoder::read(char *stream, size_t len)
         if(mFramePos >= mFrameSize)
         {
             /* We have already sent all our data; get more */
-            mFrameSize = audio_decode_frame(mFrame);
+            mFrameSize = audio_decode_frame(mFrame, sample_skip);
             if(mFrameSize < 0)
             {
                 /* If error, we're done */
@@ -260,8 +282,8 @@
         {
             len1 = std::min(len1, -mFramePos);

-            int n = av_get_bytes_per_sample(mAVStream->codec->sample_fmt) *
-                    mAVStream->codec->channels;
+            int n = av_get_bytes_per_sample(mOutputSampleFormat)
+                    * av_get_channel_layout_nb_channels(mOutputChannelLayout);

             /* add samples by copying the first sample*/
             if(n == 1)
diff --git a/extern/ogre-ffmpeg-videoplayer/audiodecoder.hpp b/extern/ogre-ffmpeg-videoplayer/audiodecoder.hpp
index 88406d51d..b05b16d42 100644
--- a/extern/ogre-ffmpeg-videoplayer/audiodecoder.hpp
+++ b/extern/ogre-ffmpeg-videoplayer/audiodecoder.hpp
@@ -77,7 +77,8 @@ private:
      * skip (negative means to duplicate). */
     int synchronize_audio();

-    int audio_decode_frame(AVFrame *frame);
+    /// @param sample_skip If seeking happened, the sample_skip variable will be reset to 0.
+    int audio_decode_frame(AVFrame *frame, int &sample_skip);

 public:
     MovieAudioDecoder(VideoState *is);
@@ -101,6 +102,8 @@ public:
     virtual double getAudioClock();

     /// This is the main interface to be used by the user's audio library.
+    /// @par Request filling the \a stream with \a len number of bytes.
+    /// @return The number of bytes read (may not be the requested number if we arrived at the end of the audio stream)
     size_t read(char *stream, size_t len);
 };

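Editor's note: a rough illustration of how a user-supplied audio backend might pull data through the read() interface documented above; the loop and buffer size are hypothetical and not part of the patch.

    #include <vector>
    #include "audiodecoder.hpp"

    // Hypothetical pull loop for a user's audio library.
    void drainAudio(Video::MovieAudioDecoder &decoder)
    {
        std::vector<char> buffer(4096);
        for (;;)
        {
            // Fills the buffer with decoded samples (silence while the video is paused).
            size_t got = decoder.read(&buffer[0], buffer.size());
            // ... hand `got` bytes to the output device here ...
            if (got < buffer.size())
                break; // fewer bytes than requested: end of the audio stream
        }
    }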
diff --git a/extern/ogre-ffmpeg-videoplayer/videoplayer.cpp b/extern/ogre-ffmpeg-videoplayer/videoplayer.cpp
index 434b676ee..d80449199 100644
--- a/extern/ogre-ffmpeg-videoplayer/videoplayer.cpp
+++ b/extern/ogre-ffmpeg-videoplayer/videoplayer.cpp
@@ -38,19 +38,17 @@ void VideoPlayer::playVideo(const std::string &resourceName)
     }
 }

-void VideoPlayer::update ()
+bool VideoPlayer::update ()
 {
     if(mState)
-    {
-        if(!mState->update())
-            close();
-    }
+        return mState->update();
+    return false;
 }

 std::string VideoPlayer::getTextureName()
 {
     std::string name;
-    if (mState)
+    if (mState && !mState->mTexture.isNull())
         name = mState->mTexture->getName();
     return name;
 }
@@ -58,7 +56,7 @@ std::string VideoPlayer::getTextureName()
 int VideoPlayer::getVideoWidth()
 {
     int width=0;
-    if (mState)
+    if (mState && !mState->mTexture.isNull())
         width = mState->mTexture->getWidth();
     return width;
 }
@@ -66,7 +64,7 @@ int VideoPlayer::getVideoWidth()
 int VideoPlayer::getVideoHeight()
 {
     int height=0;
-    if (mState)
+    if (mState && !mState->mTexture.isNull())
         height = mState->mTexture->getHeight();
     return height;
 }
@@ -82,14 +80,48 @@ void VideoPlayer::close()
     }
 }

-bool VideoPlayer::isPlaying ()
+bool VideoPlayer::hasAudioStream()
 {
-    return mState != NULL;
+    return mState && mState->audio_st != NULL;
 }

-bool VideoPlayer::hasAudioStream()
+void VideoPlayer::play()
 {
-    return mState && mState->audio_st != NULL;
+    if (mState)
+        mState->setPaused(false);
+}
+
+void VideoPlayer::pause()
+{
+    if (mState)
+        mState->setPaused(true);
+}
+
+bool VideoPlayer::isPaused()
+{
+    if (mState)
+        return mState->mPaused;
+    return true;
+}
+
+double VideoPlayer::getCurrentTime()
+{
+    if (mState)
+        return mState->get_master_clock();
+    return 0.0;
+}
+
+void VideoPlayer::seek(double time)
+{
+    if (mState)
+        mState->seekTo(time);
+}
+
+double VideoPlayer::getDuration()
+{
+    if (mState)
+        return mState->getDuration();
+    return 0.0;
 }

 }
diff --git a/extern/ogre-ffmpeg-videoplayer/videoplayer.hpp b/extern/ogre-ffmpeg-videoplayer/videoplayer.hpp
index 750ad02e5..2727ac6f0 100644
--- a/extern/ogre-ffmpeg-videoplayer/videoplayer.hpp
+++ b/extern/ogre-ffmpeg-videoplayer/videoplayer.hpp
@@ -29,16 +29,29 @@ namespace Video
         bool hasAudioStream();

         /// Play the given video. If a video is already playing, the old video is closed first.
+        /// @note The video will be unpaused by default. Use the pause() and play() methods to control pausing.
         void playVideo (const std::string& resourceName);

+        /// Get the current playback time position in the video, in seconds
+        double getCurrentTime();
+
+        /// Get the duration of the video in seconds
+        double getDuration();
+
+        /// Seek to the specified time position in the video
+        void seek(double time);
+
+        void play();
+        void pause();
+        bool isPaused();
+
         /// This should be called every frame by the user to update the video texture.
-        void update();
+        /// @return Returns true if the video is still playing, false if we have reached the end of the video stream.
+        bool update();

         /// Stop the currently playing video, if a video is playing.
         void close();

-        bool isPlaying();
-
         /// Return the texture name of the currently playing video, or "" if no video is playing.
         std::string getTextureName();
         /// Return the width of the currently playing video, or 0 if no video is playing.
diff --git a/extern/ogre-ffmpeg-videoplayer/videostate.cpp b/extern/ogre-ffmpeg-videoplayer/videostate.cpp
index c77723421..7ac7a122c 100644
--- a/extern/ogre-ffmpeg-videoplayer/videostate.cpp
+++ b/extern/ogre-ffmpeg-videoplayer/videostate.cpp
@@ -31,6 +31,18 @@ extern "C"

 }

+static const char* flushString = "FLUSH";
+struct FlushPacket : AVPacket
+{
+    FlushPacket()
+        : AVPacket()
+    {
+        data = ( (uint8_t*)flushString);
+    }
+};
+
+static FlushPacket flush_pkt;
+
 #include "videoplayer.hpp"
 #include "audiodecoder.hpp"
 #include "audiofactory.hpp"
@@ -46,14 +58,18 @@ namespace Video

 VideoState::VideoState()
     : format_ctx(NULL), av_sync_type(AV_SYNC_DEFAULT)
-    , external_clock_base(0.0)
     , audio_st(NULL)
     , video_st(NULL), frame_last_pts(0.0)
     , video_clock(0.0), sws_context(NULL), rgbaFrame(NULL), pictq_size(0)
     , pictq_rindex(0), pictq_windex(0)
-    , quit(false)
+    , mQuit(false), mPaused(false)
     , mAudioFactory(NULL)
+    , mSeekRequested(false)
+    , mSeekPos(0)
+    , mVideoEnded(false)
 {
+    mFlushPktData = flush_pkt.data;
+
     // Register all formats and codecs
     av_register_all();
 }
@@ -77,7 +93,7 @@ void PacketQueue::put(AVPacket *pkt)
     pkt1->pkt = *pkt;
     pkt1->next = NULL;

-    if(pkt1->pkt.destruct == NULL)
+    if(pkt->data != flush_pkt.data && pkt1->pkt.destruct == NULL)
     {
         if(av_dup_packet(&pkt1->pkt) < 0)
         {
@@ -104,7 +120,7 @@ int PacketQueue::get(AVPacket *pkt, VideoState *is)
 {
     boost::unique_lock<boost::mutex> lock(this->mutex);
-    while(!is->quit)
+    while(!is->mQuit)
     {
         AVPacketList *pkt1 = this->first_pkt;
         if(pkt1)
@@ -143,7 +159,8 @@ void PacketQueue::clear()
     for(pkt = this->first_pkt; pkt != NULL; pkt = pkt1)
     {
         pkt1 = pkt->next;
-        av_free_packet(&pkt->pkt);
+        if (pkt->pkt.data != flush_pkt.data)
+            av_free_packet(&pkt->pkt);
         av_freep(&pkt);
     }
     this->last_pkt = NULL;
@@ -205,6 +222,7 @@ void VideoState::video_display(VideoPicture *vp)

 void VideoState::video_refresh()
 {
+    boost::mutex::scoped_lock lock(this->pictq_mutex);
     if(this->pictq_size == 0)
         return;

@@ -212,12 +230,11 @@
     {
         VideoPicture* vp = &this->pictq[this->pictq_rindex];
         this->video_display(vp);
+        this->pictq_rindex = (pictq_rindex+1) % VIDEO_PICTURE_QUEUE_SIZE;

         this->frame_last_pts = vp->pts;
-        this->pictq_mutex.lock();
         this->pictq_size--;
         this->pictq_cond.notify_one();
-        this->pictq_mutex.unlock();
     }
     else
     {
@@ -236,19 +253,18 @@
                 break;
         }

+        assert (this->pictq_rindex < VIDEO_PICTURE_QUEUE_SIZE);
         VideoPicture* vp = &this->pictq[this->pictq_rindex];

         this->video_display(vp);

         this->frame_last_pts = vp->pts;

-        this->pictq_mutex.lock();
         this->pictq_size -= i;

         // update queue for next picture
         this->pictq_size--;
-        this->pictq_rindex++;
+        this->pictq_rindex = (this->pictq_rindex+1) % VIDEO_PICTURE_QUEUE_SIZE;
         this->pictq_cond.notify_one();
-        this->pictq_mutex.unlock();
     }
 }
@@ -260,12 +276,14 @@ int VideoState::queue_picture(AVFrame *pFrame, double pts)

     /* wait until we have a new pic */
     {
         boost::unique_lock<boost::mutex> lock(this->pictq_mutex);
-        while(this->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE && !this->quit)
+        while(this->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE && !this->mQuit)
             this->pictq_cond.timed_wait(lock, boost::posix_time::milliseconds(1));
     }
-    if(this->quit)
+    if(this->mQuit)
         return -1;

+    this->pictq_mutex.lock();
+
     // windex is set to 0 initially
     vp = &this->pictq[this->pictq_windex];

@@ -292,7 +310,6 @@ int VideoState::queue_picture(AVFrame *pFrame, double pts)

     // now we inform our display thread that we have a pic ready
     this->pictq_windex = (this->pictq_windex+1) % VIDEO_PICTURE_QUEUE_SIZE;
-    this->pictq_mutex.lock();
     this->pictq_size++;
     this->pictq_mutex.unlock();

     return 0;
 }
@@ -353,6 +370,21 @@ void VideoState::video_thread_loop(VideoState *self)

     while(self->videoq.get(packet, self) >= 0)
     {
+        if(packet->data == flush_pkt.data)
+        {
+            avcodec_flush_buffers((*self->video_st)->codec);
+
+            self->pictq_mutex.lock();
+            self->pictq_size = 0;
+            self->pictq_rindex = 0;
+            self->pictq_windex = 0;
+            self->pictq_mutex.unlock();
+
+            self->frame_last_pts = packet->pts * av_q2d((*self->video_st)->time_base);
+            global_video_pkt_pts = self->frame_last_pts;
+            continue;
+        }
+
         // Save global pts to be stored in pFrame
         global_video_pkt_pts = packet->pts;
         // Decode video frame
@@ -394,8 +426,67 @@ void VideoState::decode_thread_loop(VideoState *self)
             throw std::runtime_error("No streams to decode");

         // main decode loop
-        while(!self->quit)
+        while(!self->mQuit)
         {
+            if(self->mSeekRequested)
+            {
+                uint64_t seek_target = self->mSeekPos;
+                int streamIndex = -1;
+
+                int videoStreamIndex = -1;;
+                int audioStreamIndex = -1;
+                if (self->video_st)
+                    videoStreamIndex = self->video_st - self->format_ctx->streams;
+                if (self->audio_st)
+                    audioStreamIndex = self->audio_st - self->format_ctx->streams;
+
+                if(videoStreamIndex >= 0)
+                    streamIndex = videoStreamIndex;
+                else if(audioStreamIndex >= 0)
+                    streamIndex = audioStreamIndex;
+
+                uint64_t timestamp = seek_target;
+
+                // QtCreator's highlighter doesn't like AV_TIME_BASE_Q's {} initializer for some reason
+                AVRational avTimeBaseQ = AVRational(); // = AV_TIME_BASE_Q;
+                avTimeBaseQ.num = 1;
+                avTimeBaseQ.den = AV_TIME_BASE;
+
+                if(streamIndex >= 0)
+                    timestamp = av_rescale_q(seek_target, avTimeBaseQ, self->format_ctx->streams[streamIndex]->time_base);
+
+                // AVSEEK_FLAG_BACKWARD appears to be needed, otherwise ffmpeg may seek to a keyframe *after* the given time
+                // we want to seek to any keyframe *before* the given time, so we can continue decoding as normal from there on
+                if(av_seek_frame(self->format_ctx, streamIndex, timestamp, AVSEEK_FLAG_BACKWARD) < 0)
+                    std::cerr << "Error seeking " << self->format_ctx->filename << std::endl;
+                else
+                {
+                    // Clear the packet queues and put a special packet with the new clock time
+                    if(audioStreamIndex >= 0)
+                    {
+                        self->audioq.clear();
+                        flush_pkt.pts = av_rescale_q(seek_target, avTimeBaseQ,
+                            self->format_ctx->streams[audioStreamIndex]->time_base);
+                        self->audioq.put(&flush_pkt);
+                    }
+                    if(videoStreamIndex >= 0)
+                    {
+                        self->videoq.clear();
+                        flush_pkt.pts = av_rescale_q(seek_target, avTimeBaseQ,
+                            self->format_ctx->streams[videoStreamIndex]->time_base);
+                        self->videoq.put(&flush_pkt);
+                    }
+                    self->pictq_mutex.lock();
+                    self->pictq_size = 0;
+                    self->pictq_rindex = 0;
+                    self->pictq_windex = 0;
+                    self->pictq_mutex.unlock();
+                    self->mExternalClock.set(seek_target);
+                }
+                self->mSeekRequested = false;
+            }
+
             if((self->audio_st && self->audioq.size > MAX_AUDIOQ_SIZE) ||
                (self->video_st && self->videoq.size > MAX_VIDEOQ_SIZE))
             {
@@ -404,7 +495,13 @@ void VideoState::decode_thread_loop(VideoState *self)
             }

             if(av_read_frame(pFormatCtx, packet) < 0)
-                break;
+            {
+                if (self->audioq.nb_packets == 0 && self->videoq.nb_packets == 0 && self->pictq_size == 0)
+                    self->mVideoEnded = true;
+                continue;
+            }
+            else
+                self->mVideoEnded = false;

             // Is this a packet from the video stream?
             if(self->video_st && packet->stream_index == self->video_st-pFormatCtx->streams)
                 self->videoq.put(packet);
             else if(self->audio_st && packet->stream_index == self->audio_st-pFormatCtx->streams)
                 self->audioq.put(packet);
             else
                 av_free_packet(packet);
         }
-
-        /* all done - wait for it */
-        self->videoq.flush();
-        self->audioq.flush();
-        while(!self->quit)
-        {
-            // EOF reached, all packets processed, we can exit now
-            if(self->audioq.nb_packets == 0 && self->videoq.nb_packets == 0 && self->pictq_size == 0)
-                break;
-            boost::this_thread::sleep(boost::posix_time::milliseconds(100));
-        }
     }
     catch(std::runtime_error& e) {
         std::cerr << "An error occured playing the video: " << e.what () << std::endl;
     }
     catch(Ogre::Exception& e) {
         std::cerr << "An error occured playing the video: " << e.getFullDescription () << std::endl;
     }

-    self->quit = true;
+    self->mQuit = true;
 }

 bool VideoState::update()
 {
-    if(this->quit)
-        return false;
-
     this->video_refresh();
-    return true;
+    return !this->mVideoEnded;
 }

@@ -510,7 +593,7 @@ void VideoState::init(const std::string& resourceName)
     unsigned int i;

     this->av_sync_type = AV_SYNC_DEFAULT;
-    this->quit = false;
+    this->mQuit = false;

     this->stream = Ogre::ResourceGroupManager::getSingleton().openResource(resourceName);
     if(this->stream.isNull())
@@ -564,7 +647,7 @@ void VideoState::init(const std::string& resourceName)
             audio_index = i;
     }

-    this->external_clock_base = av_gettime();
+    mExternalClock.set(0);

     if(audio_index >= 0)
         this->stream_open(audio_index, this->format_ctx);
@@ -598,12 +681,12 @@

 void VideoState::deinit()
 {
-    this->quit = true;
+    this->mQuit = true;

-    mAudioDecoder.reset();
+    this->audioq.flush();
+    this->videoq.flush();

-    this->audioq.cond.notify_one();
-    this->videoq.cond.notify_one();
+    mAudioDecoder.reset();

     if (this->parse_thread.joinable())
         this->parse_thread.join();
@@ -643,7 +726,7 @@

 double VideoState::get_external_clock()
 {
-    return ((uint64_t)av_gettime()-this->external_clock_base) / 1000000.0;
+    return mExternalClock.get() / 1000000.0;
 }

 double VideoState::get_master_clock()
@@ -667,5 +750,62 @@ double VideoState::get_audio_clock()
     return mAudioDecoder->getAudioClock();
 }

+void VideoState::setPaused(bool isPaused)
+{
+    this->mPaused = isPaused;
+    mExternalClock.setPaused(isPaused);
+}
+
+void VideoState::seekTo(double time)
+{
+    time = std::max(0.0, time);
+    time = std::min(getDuration(), time);
+    mSeekPos = (uint64_t) (time * AV_TIME_BASE);
+    mSeekRequested = true;
+}
+
+double VideoState::getDuration()
+{
+    return this->format_ctx->duration / 1000000.0;
+}
+
+
+ExternalClock::ExternalClock()
+    : mTimeBase(av_gettime())
+    , mPausedAt(0)
+    , mPaused(false)
+{
+}
+
+void ExternalClock::setPaused(bool paused)
+{
+    boost::mutex::scoped_lock lock(mMutex);
+    if (mPaused == paused)
+        return;
+    if (paused)
+    {
+        mPausedAt = av_gettime() - mTimeBase;
+    }
+    else
+        mTimeBase = av_gettime() - mPausedAt;
+    mPaused = paused;
+}
+
+uint64_t ExternalClock::get()
+{
+    boost::mutex::scoped_lock lock(mMutex);
+    if (mPaused)
+        return mPausedAt;
+    else
+        return av_gettime() - mTimeBase;
+}
+
+void ExternalClock::set(uint64_t time)
+{
+    boost::mutex::scoped_lock lock(mMutex);
+    mTimeBase = av_gettime() - time;
+    mPausedAt = time;
+}
+
 }
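Editor's note: the ExternalClock above is what keeps pause and seeking coherent — it reports microseconds since a movable base, freezes at mPausedAt while paused, and is rebased by set() after a seek. A self-contained restatement of the same bookkeeping, for illustration only, using std::chrono in place of av_gettime() and omitting the mutex the patch uses for thread safety:

    #include <chrono>
    #include <cstdint>

    // Same arithmetic as the patch's ExternalClock, restated standalone.
    class PausableClock
    {
        uint64_t mTimeBase;   // movable "zero point", in microseconds
        uint64_t mPausedAt;   // elapsed time captured when paused
        bool mPaused;

        static uint64_t nowUs()
        {
            using namespace std::chrono;
            return duration_cast<microseconds>(steady_clock::now().time_since_epoch()).count();
        }

    public:
        PausableClock() : mTimeBase(nowUs()), mPausedAt(0), mPaused(false) {}

        // While running: time since the base. While paused: the frozen value.
        uint64_t get() const { return mPaused ? mPausedAt : nowUs() - mTimeBase; }

        void setPaused(bool paused)
        {
            if (mPaused == paused)
                return;
            if (paused)
                mPausedAt = nowUs() - mTimeBase;  // freeze the current value
            else
                mTimeBase = nowUs() - mPausedAt;  // resume seamlessly from the frozen value
            mPaused = paused;
        }

        void set(uint64_t timeUs)                 // rebase after a seek
        {
            mTimeBase = nowUs() - timeUs;
            mPausedAt = timeUs;
        }
    };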
diff --git a/extern/ogre-ffmpeg-videoplayer/videostate.hpp b/extern/ogre-ffmpeg-videoplayer/videostate.hpp
index 90ebec0a3..cdeb2d0e3 100644
--- a/extern/ogre-ffmpeg-videoplayer/videostate.hpp
+++ b/extern/ogre-ffmpeg-videoplayer/videostate.hpp
@@ -27,6 +27,21 @@
 struct VideoState;
 class MovieAudioFactory;
 class MovieAudioDecoder;

+struct ExternalClock
+{
+    ExternalClock();
+
+    uint64_t mTimeBase;
+    uint64_t mPausedAt;
+    bool mPaused;
+
+    boost::mutex mMutex;
+
+    void setPaused(bool paused);
+    uint64_t get();
+    void set(uint64_t time);
+};
+
 struct PacketQueue {
     PacketQueue() : first_pkt(NULL), last_pkt(NULL), flushing(false), nb_packets(0), size(0)
@@ -66,6 +81,11 @@ struct VideoState {
     void init(const std::string& resourceName);
     void deinit();

+    void setPaused(bool isPaused);
+    void seekTo(double time);
+
+    double getDuration();
+
     int stream_open(int stream_index, AVFormatContext *pFormatCtx);

     bool update();
@@ -93,15 +113,18 @@ struct VideoState {
     MovieAudioFactory* mAudioFactory;
     boost::shared_ptr<MovieAudioDecoder> mAudioDecoder;

+    ExternalClock mExternalClock;
+
     Ogre::DataStreamPtr stream;
     AVFormatContext* format_ctx;

     int av_sync_type;
-    uint64_t external_clock_base;

     AVStream** audio_st;
     PacketQueue audioq;

+    uint8_t* mFlushPktData;
+
     AVStream** video_st;
     double frame_last_pts;
     double video_clock; ///<pts of last decoded frame / predicted pts of next decoded frame

Date: Sat, 25 Oct 2014 00:14:51 +0200
Subject: [PATCH 3/4] Add possibly missing include for av_rescale_q

---
 extern/ogre-ffmpeg-videoplayer/videostate.cpp | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/extern/ogre-ffmpeg-videoplayer/videostate.cpp b/extern/ogre-ffmpeg-videoplayer/videostate.cpp
index 7ac7a122c..cc6308b14 100644
--- a/extern/ogre-ffmpeg-videoplayer/videostate.cpp
+++ b/extern/ogre-ffmpeg-videoplayer/videostate.cpp
@@ -25,6 +25,8 @@ extern "C"
     #include <libavutil/time.h>
     #endif

+    #include <libavutil/mathematics.h>
+
     #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55,28,1)
     #define av_frame_alloc  avcodec_alloc_frame
     #endif

From dbe30e31b935dbdf60136e3121b7b67cd17d93fb Mon Sep 17 00:00:00 2001
From: scrawl
Date: Sat, 25 Oct 2014 00:56:43 +0200
Subject: [PATCH 4/4] Make creature's model take priority over base_anim.nif
 (Fixes #2055)

---
 apps/openmw/mwrender/creatureanimation.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/apps/openmw/mwrender/creatureanimation.cpp b/apps/openmw/mwrender/creatureanimation.cpp
index 247a0ba14..fef9fa644 100644
--- a/apps/openmw/mwrender/creatureanimation.cpp
+++ b/apps/openmw/mwrender/creatureanimation.cpp
@@ -27,9 +27,9 @@ CreatureAnimation::CreatureAnimation(const MWWorld::Ptr &ptr)
         setObjectRoot(model, false);
         setRenderProperties(mObjectRoot, RV_Actors, RQG_Main, RQG_Alpha);

-        addAnimSource(model);
         if((ref->mBase->mFlags&ESM::Creature::Bipedal))
             addAnimSource("meshes\\base_anim.nif");
+        addAnimSource(model);
     }
 }

@@ -47,9 +47,9 @@ CreatureWeaponAnimation::CreatureWeaponAnimation(const MWWorld::Ptr &ptr)
         setObjectRoot(model, false);
         setRenderProperties(mObjectRoot, RV_Actors, RQG_Main, RQG_Alpha);

-        addAnimSource(model);
         if((ref->mBase->mFlags&ESM::Creature::Bipedal))
             addAnimSource("meshes\\base_anim.nif");
+        addAnimSource(model);

         mPtr.getClass().getInventoryStore(mPtr).setListener(this, mPtr);